/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
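
/* How the encoding composes (an illustrative note, not from the
   original source): bit 0 stands for LT, bit 1 for EQ, bit 2 for GT
   and bit 3 for UNORDERED, so ORing two codes gives the code of the
   ORed predicates, and inverting a comparison is a bitwise NOT within
   the low four bits.  For example:

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)                  == 3
     COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD) == 13
     invert (COMPCODE_LT) == (~COMPCODE_LT & 15) == COMPCODE_UNGE == 14  */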
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
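
/* Example (illustrative; the constants are placeholders, not names
   from this file):

     tree eight = build_int_cst (sizetype, 8);
     tree four = build_int_cst (sizetype, 4);
     tree q = div_if_zero_remainder (EXACT_DIV_EXPR, eight, four);

   Q is then the constant 2, while dividing eight by a constant 3
   would leave a remainder and yield NULL_TREE.  */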
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
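
/* A sketch of the intended calling pattern (illustrative; STMT and
   USED_P are placeholders, not names from this file):

     fold_defer_overflow_warnings ();
     tem = fold_binary (code, type, op0, op1);
     if (used_p)
       fold_undefer_overflow_warnings (true, stmt, 0);
     else
       fold_undefer_and_ignore_overflow_warnings ();

   Only the first branch can emit the deferred -Wstrict-overflow
   warning, and it is reported at the location of STMT.  */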
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
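
/* Illustration: sin is odd, so -sin(x) may be rewritten as sin(-x);
   cos is even rather than odd and is deliberately absent from the
   list above.  The rint family is odd only when -frounding-math is
   off, because rounding toward a direction chosen at run time is not
   symmetric about zero.  */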
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
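
/* Concretely (illustrative): for a 32-bit signed int the check above
   fails only for 0x80000000, i.e. INT_MIN, whose negation does not
   fit in the type; every other value can be negated without
   overflow.  */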
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
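
/* Worked example (illustrative): for a signed int expression
   -(a + 5), the PLUS_EXPR case above negates the literal operand and
   rebuilds the tree as (-5) - a, canonicalizing the negation into a
   subtraction with a negated constant.  */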
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
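
/* Decomposition example (illustrative): splitting IN = a + 5 with
   CODE == PLUS_EXPR stores 5 in *LITP, leaves *CONP and *MINUS_LITP
   null and returns the variable part a; splitting a - 5 routes the
   literal through *MINUS_LITP instead, because it was subtracted.  */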
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      /* ??? Need quad precision, or an additional shift operand
         to the multiply primitive, to handle very large highparts.  */
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        return NULL_TREE;
      /* The double_int product holds the full 2*precision result here
         (precision fits in one host wide int), so the high part is the
         product shifted right by the type precision.  */
      tmp = op1 * op2;
      res = tmp.rshift (TYPE_PRECISION (type), 2 * TYPE_PRECISION (type),
                        !uns);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
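
/* Example (illustrative): folding 6 * 7 at compile time.

     tree six = build_int_cst (integer_type_node, 6);
     tree seven = build_int_cst (integer_type_node, 7);
     tree prod = int_const_binop (MULT_EXPR, six, seven);

   PROD is the INTEGER_CST 42; for a code that int_const_binop_1 does
   not handle, the result would instead be NULL_TREE.  */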
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
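
/* Sketch (illustrative): called with code PLUS_EXPR and two REAL_CST
   operands for 1.5 and 2.25, the REAL_CST arm above folds to a
   REAL_CST 3.75; with -fsignaling-nans in effect and a NaN operand,
   the same arm deliberately returns NULL_TREE instead of folding.  */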
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
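
/* Usage sketch (illustrative): size_binop (PLUS_EXPR, size_int (4),
   size_int (8)) takes the INTEGER_CST fast path above and yields the
   sizetype constant 12; if either operand is not constant the call
   falls through to fold_build2_loc.  */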
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
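
/* Example of the saturation described above (illustrative): folding
   (int) 1.0e30 overflows a 32-bit int, so the result is the type's
   maximum, 2147483647, with TREE_OVERFLOW set; a NaN operand yields
   0, also with TREE_OVERFLOW set.  */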
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do so by adding 1 to temp when the discarded fractional bits
     are nonzero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
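
/* Example (illustrative): an int-typed ARG is convertible to
   long_integer_type_node here because both types are INTEGRAL_TYPE_P,
   while int to float fails the switch above (REAL_TYPE vs.
   INTEGER_TYPE) and must be handled as a FLOAT_EXPR by
   fold_convert_loc below.  */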
1818 /* Convert expression ARG to type TYPE. Used by the middle-end for
1819 simple conversions in preference to calling the front-end's convert. */
1821 tree
1822 fold_convert_loc (location_t loc, tree type, tree arg)
1824 tree orig = TREE_TYPE (arg);
1825 tree tem;
1827 if (type == orig)
1828 return arg;
1830 if (TREE_CODE (arg) == ERROR_MARK
1831 || TREE_CODE (type) == ERROR_MARK
1832 || TREE_CODE (orig) == ERROR_MARK)
1833 return error_mark_node;
1835 switch (TREE_CODE (type))
1837 case POINTER_TYPE:
1838 case REFERENCE_TYPE:
1839 /* Handle conversions between pointers to different address spaces. */
1840 if (POINTER_TYPE_P (orig)
1841 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1842 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1843 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1844 /* fall through */
1846 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1847 case OFFSET_TYPE:
1848 if (TREE_CODE (arg) == INTEGER_CST)
1850 tem = fold_convert_const (NOP_EXPR, type, arg);
1851 if (tem != NULL_TREE)
1852 return tem;
1854 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1855 || TREE_CODE (orig) == OFFSET_TYPE)
1856 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1857 if (TREE_CODE (orig) == COMPLEX_TYPE)
1858 return fold_convert_loc (loc, type,
1859 fold_build1_loc (loc, REALPART_EXPR,
1860 TREE_TYPE (orig), arg));
1861 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1862 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1863 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1865 case REAL_TYPE:
1866 if (TREE_CODE (arg) == INTEGER_CST)
1868 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1869 if (tem != NULL_TREE)
1870 return tem;
1872 else if (TREE_CODE (arg) == REAL_CST)
1874 tem = fold_convert_const (NOP_EXPR, type, arg);
1875 if (tem != NULL_TREE)
1876 return tem;
1878 else if (TREE_CODE (arg) == FIXED_CST)
1880 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1881 if (tem != NULL_TREE)
1882 return tem;
1885 switch (TREE_CODE (orig))
1887 case INTEGER_TYPE:
1888 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1889 case POINTER_TYPE: case REFERENCE_TYPE:
1890 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1892 case REAL_TYPE:
1893 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1895 case FIXED_POINT_TYPE:
1896 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1898 case COMPLEX_TYPE:
1899 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1900 return fold_convert_loc (loc, type, tem);
1902 default:
1903 gcc_unreachable ();
1906 case FIXED_POINT_TYPE:
1907 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1908 || TREE_CODE (arg) == REAL_CST)
1910 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1911 if (tem != NULL_TREE)
1912 goto fold_convert_exit;
1915 switch (TREE_CODE (orig))
1917 case FIXED_POINT_TYPE:
1918 case INTEGER_TYPE:
1919 case ENUMERAL_TYPE:
1920 case BOOLEAN_TYPE:
1921 case REAL_TYPE:
1922 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1924 case COMPLEX_TYPE:
1925 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1926 return fold_convert_loc (loc, type, tem);
1928 default:
1929 gcc_unreachable ();
1932 case COMPLEX_TYPE:
1933 switch (TREE_CODE (orig))
1935 case INTEGER_TYPE:
1936 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1937 case POINTER_TYPE: case REFERENCE_TYPE:
1938 case REAL_TYPE:
1939 case FIXED_POINT_TYPE:
1940 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1941 fold_convert_loc (loc, TREE_TYPE (type), arg),
1942 fold_convert_loc (loc, TREE_TYPE (type),
1943 integer_zero_node));
1944 case COMPLEX_TYPE:
1946 tree rpart, ipart;
1948 if (TREE_CODE (arg) == COMPLEX_EXPR)
1950 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1951 TREE_OPERAND (arg, 0));
1952 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1953 TREE_OPERAND (arg, 1));
1954 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1957 arg = save_expr (arg);
1958 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1959 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1960 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1961 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1962 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1965 default:
1966 gcc_unreachable ();
1969 case VECTOR_TYPE:
1970 if (integer_zerop (arg))
1971 return build_zero_vector (type);
1972 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1973 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1974 || TREE_CODE (orig) == VECTOR_TYPE);
1975 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
1977 case VOID_TYPE:
1978 tem = fold_ignored_result (arg);
1979 return fold_build1_loc (loc, NOP_EXPR, type, tem);
1981 default:
1982 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1983 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1984 gcc_unreachable ();
1986 fold_convert_exit:
1987 protected_set_expr_location_unshare (tem, loc);
1988 return tem;
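/* Illustrative sketch, not part of the original sources: a hypothetical
   caller folding "(double) 3" at compile time.  fold_convert_loc tries
   fold_convert_const first, so no FLOAT_EXPR survives in the tree:

     tree t = fold_convert_loc (loc, double_type_node,
                                build_int_cst (integer_type_node, 3));

   Here T ends up a REAL_CST holding 3.0 rather than a conversion node.  */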
1991 /* Return false if expr can be assumed not to be an lvalue, true
1992 otherwise. */
1994 static bool
1995 maybe_lvalue_p (const_tree x)
1997 /* We only need to wrap lvalue tree codes. */
1998 switch (TREE_CODE (x))
2000 case VAR_DECL:
2001 case PARM_DECL:
2002 case RESULT_DECL:
2003 case LABEL_DECL:
2004 case FUNCTION_DECL:
2005 case SSA_NAME:
2007 case COMPONENT_REF:
2008 case MEM_REF:
2009 case INDIRECT_REF:
2010 case ARRAY_REF:
2011 case ARRAY_RANGE_REF:
2012 case BIT_FIELD_REF:
2013 case OBJ_TYPE_REF:
2015 case REALPART_EXPR:
2016 case IMAGPART_EXPR:
2017 case PREINCREMENT_EXPR:
2018 case PREDECREMENT_EXPR:
2019 case SAVE_EXPR:
2020 case TRY_CATCH_EXPR:
2021 case WITH_CLEANUP_EXPR:
2022 case COMPOUND_EXPR:
2023 case MODIFY_EXPR:
2024 case TARGET_EXPR:
2025 case COND_EXPR:
2026 case BIND_EXPR:
2027 break;
2029 default:
2030 /* Assume the worst for front-end tree codes. */
2031 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2032 break;
2033 return false;
2036 return true;
2039 /* Return an expr equal to X but certainly not valid as an lvalue. */
2041 tree
2042 non_lvalue_loc (location_t loc, tree x)
2044 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2045 us. */
2046 if (in_gimple_form)
2047 return x;
2049 if (! maybe_lvalue_p (x))
2050 return x;
2051 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2054 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2055 Zero means allow extended lvalues. */
2057 int pedantic_lvalues;
2059 /* When pedantic, return an expr equal to X but certainly not valid as a
2060 pedantic lvalue. Otherwise, return X. */
2062 static tree
2063 pedantic_non_lvalue_loc (location_t loc, tree x)
2065 if (pedantic_lvalues)
2066 return non_lvalue_loc (loc, x);
2068 return protected_set_expr_location_unshare (x, loc);
2071 /* Given a tree comparison code, return the code that is the logical inverse.
2072 It is generally not safe to do this for floating-point comparisons, except
2073 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2074 ERROR_MARK in this case. */
2076 enum tree_code
2077 invert_tree_comparison (enum tree_code code, bool honor_nans)
2079 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2080 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2081 return ERROR_MARK;
2083 switch (code)
2085 case EQ_EXPR:
2086 return NE_EXPR;
2087 case NE_EXPR:
2088 return EQ_EXPR;
2089 case GT_EXPR:
2090 return honor_nans ? UNLE_EXPR : LE_EXPR;
2091 case GE_EXPR:
2092 return honor_nans ? UNLT_EXPR : LT_EXPR;
2093 case LT_EXPR:
2094 return honor_nans ? UNGE_EXPR : GE_EXPR;
2095 case LE_EXPR:
2096 return honor_nans ? UNGT_EXPR : GT_EXPR;
2097 case LTGT_EXPR:
2098 return UNEQ_EXPR;
2099 case UNEQ_EXPR:
2100 return LTGT_EXPR;
2101 case UNGT_EXPR:
2102 return LE_EXPR;
2103 case UNGE_EXPR:
2104 return LT_EXPR;
2105 case UNLT_EXPR:
2106 return GE_EXPR;
2107 case UNLE_EXPR:
2108 return GT_EXPR;
2109 case ORDERED_EXPR:
2110 return UNORDERED_EXPR;
2111 case UNORDERED_EXPR:
2112 return ORDERED_EXPR;
2113 default:
2114 gcc_unreachable ();
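/* Example (comment only, not from the original sources): with NaNs
   honored and no trapping math, invert_tree_comparison (LT_EXPR, true)
   returns UNGE_EXPR, since "!(a < b)" is "a >= b or unordered".  With
   flag_trapping_math set it returns ERROR_MARK instead, because the
   inverted comparison would no longer trap on unordered operands the
   way the original LT_EXPR does.  */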
2118 /* Similar, but return the comparison that results if the operands are
2119 swapped. This is safe for floating-point. */
2121 enum tree_code
2122 swap_tree_comparison (enum tree_code code)
2124 switch (code)
2126 case EQ_EXPR:
2127 case NE_EXPR:
2128 case ORDERED_EXPR:
2129 case UNORDERED_EXPR:
2130 case LTGT_EXPR:
2131 case UNEQ_EXPR:
2132 return code;
2133 case GT_EXPR:
2134 return LT_EXPR;
2135 case GE_EXPR:
2136 return LE_EXPR;
2137 case LT_EXPR:
2138 return GT_EXPR;
2139 case LE_EXPR:
2140 return GE_EXPR;
2141 case UNGT_EXPR:
2142 return UNLT_EXPR;
2143 case UNGE_EXPR:
2144 return UNLE_EXPR;
2145 case UNLT_EXPR:
2146 return UNGT_EXPR;
2147 case UNLE_EXPR:
2148 return UNGE_EXPR;
2149 default:
2150 gcc_unreachable ();
2155 /* Convert a comparison tree code from an enum tree_code representation
2156 into a compcode bit-based encoding. This function is the inverse of
2157 compcode_to_comparison. */
2159 static enum comparison_code
2160 comparison_to_compcode (enum tree_code code)
2162 switch (code)
2164 case LT_EXPR:
2165 return COMPCODE_LT;
2166 case EQ_EXPR:
2167 return COMPCODE_EQ;
2168 case LE_EXPR:
2169 return COMPCODE_LE;
2170 case GT_EXPR:
2171 return COMPCODE_GT;
2172 case NE_EXPR:
2173 return COMPCODE_NE;
2174 case GE_EXPR:
2175 return COMPCODE_GE;
2176 case ORDERED_EXPR:
2177 return COMPCODE_ORD;
2178 case UNORDERED_EXPR:
2179 return COMPCODE_UNORD;
2180 case UNLT_EXPR:
2181 return COMPCODE_UNLT;
2182 case UNEQ_EXPR:
2183 return COMPCODE_UNEQ;
2184 case UNLE_EXPR:
2185 return COMPCODE_UNLE;
2186 case UNGT_EXPR:
2187 return COMPCODE_UNGT;
2188 case LTGT_EXPR:
2189 return COMPCODE_LTGT;
2190 case UNGE_EXPR:
2191 return COMPCODE_UNGE;
2192 default:
2193 gcc_unreachable ();
2197 /* Convert a compcode bit-based encoding of a comparison operator back
2198 to GCC's enum tree_code representation. This function is the
2199 inverse of comparison_to_compcode. */
2201 static enum tree_code
2202 compcode_to_comparison (enum comparison_code code)
2204 switch (code)
2206 case COMPCODE_LT:
2207 return LT_EXPR;
2208 case COMPCODE_EQ:
2209 return EQ_EXPR;
2210 case COMPCODE_LE:
2211 return LE_EXPR;
2212 case COMPCODE_GT:
2213 return GT_EXPR;
2214 case COMPCODE_NE:
2215 return NE_EXPR;
2216 case COMPCODE_GE:
2217 return GE_EXPR;
2218 case COMPCODE_ORD:
2219 return ORDERED_EXPR;
2220 case COMPCODE_UNORD:
2221 return UNORDERED_EXPR;
2222 case COMPCODE_UNLT:
2223 return UNLT_EXPR;
2224 case COMPCODE_UNEQ:
2225 return UNEQ_EXPR;
2226 case COMPCODE_UNLE:
2227 return UNLE_EXPR;
2228 case COMPCODE_UNGT:
2229 return UNGT_EXPR;
2230 case COMPCODE_LTGT:
2231 return LTGT_EXPR;
2232 case COMPCODE_UNGE:
2233 return UNGE_EXPR;
2234 default:
2235 gcc_unreachable ();
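/* Illustrative arithmetic, assuming the bit values LT = 1, EQ = 2,
   GT = 4 and UNORD = 8 of this encoding: each compcode is the OR of
   the elementary outcomes it accepts (e.g. UNGE = UNORD|GT|EQ = 14), so

     COMPCODE_LT | COMPCODE_EQ  ==  3  ==  COMPCODE_LE
     COMPCODE_LE & COMPCODE_GE  ==  2  ==  COMPCODE_EQ
     COMPCODE_TRUE ^ COMPCODE_LT == 14  ==  COMPCODE_UNGE

   i.e. OR unions predicates, AND intersects them, and XOR against
   COMPCODE_TRUE inverts one.  This is what lets combine_comparisons
   below merge two comparisons with a single bit operation.  */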
2239 /* Return a tree for the comparison which is the combination of
2240 doing the AND or OR (depending on CODE) of the two operations LCODE
2241 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2242 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2243 if this makes the transformation invalid. */
2245 tree
2246 combine_comparisons (location_t loc,
2247 enum tree_code code, enum tree_code lcode,
2248 enum tree_code rcode, tree truth_type,
2249 tree ll_arg, tree lr_arg)
2251 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2252 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2253 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2254 int compcode;
2256 switch (code)
2258 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2259 compcode = lcompcode & rcompcode;
2260 break;
2262 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2263 compcode = lcompcode | rcompcode;
2264 break;
2266 default:
2267 return NULL_TREE;
2270 if (!honor_nans)
2272 /* Eliminate unordered comparisons, as well as LTGT and ORD
2273 which are not used unless the mode has NaNs. */
2274 compcode &= ~COMPCODE_UNORD;
2275 if (compcode == COMPCODE_LTGT)
2276 compcode = COMPCODE_NE;
2277 else if (compcode == COMPCODE_ORD)
2278 compcode = COMPCODE_TRUE;
2280 else if (flag_trapping_math)
2282 /* Check that the original operation and the optimized ones will trap
2283 under the same condition. */
2284 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2285 && (lcompcode != COMPCODE_EQ)
2286 && (lcompcode != COMPCODE_ORD);
2287 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2288 && (rcompcode != COMPCODE_EQ)
2289 && (rcompcode != COMPCODE_ORD);
2290 bool trap = (compcode & COMPCODE_UNORD) == 0
2291 && (compcode != COMPCODE_EQ)
2292 && (compcode != COMPCODE_ORD);
2294 /* In a short-circuited boolean expression the LHS might be
2295 such that the RHS, if evaluated, will never trap. For
2296 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2297 if neither x nor y is NaN. (This is a mixed blessing: for
2298 example, the expression above will never trap, hence
2299 optimizing it to x < y would be invalid). */
2300 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2301 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2302 rtrap = false;
2304 /* If the comparison was short-circuited, and only the RHS
2305 trapped, we may now generate a spurious trap. */
2306 if (rtrap && !ltrap
2307 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2308 return NULL_TREE;
2310 /* If we changed the conditions that cause a trap, we lose. */
2311 if ((ltrap || rtrap) != trap)
2312 return NULL_TREE;
2315 if (compcode == COMPCODE_TRUE)
2316 return constant_boolean_node (true, truth_type);
2317 else if (compcode == COMPCODE_FALSE)
2318 return constant_boolean_node (false, truth_type);
2319 else
2321 enum tree_code tcode;
2323 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2324 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
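/* Worked example (illustrative): for integral operands, combining
   "a < b" with "a == b" under TRUTH_ORIF_EXPR computes
   COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, so a hypothetical caller

     tree t = combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                                   boolean_type_node, a, b);

   gets back the single comparison "a <= b".  For floating point with
   trapping math the checks above can instead return NULL_TREE when the
   merged comparison would trap under different conditions.  */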
2328 /* Return nonzero if two operands (typically of the same tree node)
2329 are necessarily equal. If either argument has side-effects this
2330 function returns zero. FLAGS modifies behavior as follows:
2332 If OEP_ONLY_CONST is set, only return nonzero for constants.
2333 This function tests whether the operands are indistinguishable;
2334 it does not test whether they are equal using C's == operation.
2335 The distinction is important for IEEE floating point, because
2336 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2337 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2339 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2340 even though it may hold multiple values during a function.
2341 This is because a GCC tree node guarantees that nothing else is
2342 executed between the evaluation of its "operands" (which may often
2343 be evaluated in arbitrary order). Hence if the operands themselves
2344 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2345 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2346 unset means assuming isochronic (or instantaneous) tree equivalence.
2347 Unless comparing arbitrary expression trees, such as from different
2348 statements, this flag can usually be left unset.
2350 If OEP_PURE_SAME is set, then pure functions with identical arguments
2351 are considered the same. It is used when the caller has other ways
2352 to ensure that global memory is unchanged in between. */
2354 int
2355 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2357 /* If either is ERROR_MARK, they aren't equal. */
2358 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2359 || TREE_TYPE (arg0) == error_mark_node
2360 || TREE_TYPE (arg1) == error_mark_node)
2361 return 0;
2363 /* Similar, if either does not have a type (like a released SSA name),
2364 they aren't equal. */
2365 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2366 return 0;
2368 /* Check equality of integer constants before bailing out due to
2369 precision differences. */
2370 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2371 return tree_int_cst_equal (arg0, arg1);
2373 /* If both types don't have the same signedness, then we can't consider
2374 them equal. We must check this before the STRIP_NOPS calls
2375 because they may change the signedness of the arguments. As pointers
2376 strictly don't have a signedness, require either two pointers or
2377 two non-pointers as well. */
2378 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2379 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2380 return 0;
2382 /* We cannot consider pointers to different address spaces equal. */
2383 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2384 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2385 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2386 return 0;
2388 /* If both types don't have the same precision, then it is not safe
2389 to strip NOPs. */
2390 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2391 return 0;
2393 STRIP_NOPS (arg0);
2394 STRIP_NOPS (arg1);
2396 /* In case both args are comparisons but with different comparison
2397 code, try to swap the comparison operands of one arg to produce
2398 a match and compare that variant. */
2399 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2400 && COMPARISON_CLASS_P (arg0)
2401 && COMPARISON_CLASS_P (arg1))
2403 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2405 if (TREE_CODE (arg0) == swap_code)
2406 return operand_equal_p (TREE_OPERAND (arg0, 0),
2407 TREE_OPERAND (arg1, 1), flags)
2408 && operand_equal_p (TREE_OPERAND (arg0, 1),
2409 TREE_OPERAND (arg1, 0), flags);
2412 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2413 /* This is needed for conversions and for COMPONENT_REF.
2414 Might as well play it safe and always test this. */
2415 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2416 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2417 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2418 return 0;
2420 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2421 We don't care about side effects in that case because the SAVE_EXPR
2422 takes care of that for us. In all other cases, two expressions are
2423 equal if they have no side effects. If we have two identical
2424 expressions with side effects that should be treated the same due
2425 to the only side effects being identical SAVE_EXPR's, that will
2426 be detected in the recursive calls below.
2427 If we are taking an invariant address of two identical objects
2428 they are necessarily equal as well. */
2429 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2430 && (TREE_CODE (arg0) == SAVE_EXPR
2431 || (flags & OEP_CONSTANT_ADDRESS_OF)
2432 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2433 return 1;
2435 /* Next handle constant cases, those for which we can return 1 even
2436 if ONLY_CONST is set. */
2437 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2438 switch (TREE_CODE (arg0))
2440 case INTEGER_CST:
2441 return tree_int_cst_equal (arg0, arg1);
2443 case FIXED_CST:
2444 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2445 TREE_FIXED_CST (arg1));
2447 case REAL_CST:
2448 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2449 TREE_REAL_CST (arg1)))
2450 return 1;
2453 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2455 /* If we do not distinguish between signed and unsigned zero,
2456 consider them equal. */
2457 if (real_zerop (arg0) && real_zerop (arg1))
2458 return 1;
2460 return 0;
2462 case VECTOR_CST:
2464 unsigned i;
2466 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2467 return 0;
2469 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2471 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2472 VECTOR_CST_ELT (arg1, i), flags))
2473 return 0;
2475 return 1;
2478 case COMPLEX_CST:
2479 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2480 flags)
2481 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2482 flags));
2484 case STRING_CST:
2485 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2486 && ! memcmp (TREE_STRING_POINTER (arg0),
2487 TREE_STRING_POINTER (arg1),
2488 TREE_STRING_LENGTH (arg0)));
2490 case ADDR_EXPR:
2491 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2492 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2493 ? OEP_CONSTANT_ADDRESS_OF : 0);
2494 default:
2495 break;
2498 if (flags & OEP_ONLY_CONST)
2499 return 0;
2501 /* Define macros to test an operand from arg0 and arg1 for equality and a
2502 variant that allows null and views null as being different from any
2503 non-null value. In the latter case, if either is null, then both
2504 must be; otherwise, do the normal comparison. */
2505 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2506 TREE_OPERAND (arg1, N), flags)
2508 #define OP_SAME_WITH_NULL(N) \
2509 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2510 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2512 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2514 case tcc_unary:
2515 /* Two conversions are equal only if signedness and modes match. */
2516 switch (TREE_CODE (arg0))
2518 CASE_CONVERT:
2519 case FIX_TRUNC_EXPR:
2520 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2521 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2522 return 0;
2523 break;
2524 default:
2525 break;
2528 return OP_SAME (0);
2531 case tcc_comparison:
2532 case tcc_binary:
2533 if (OP_SAME (0) && OP_SAME (1))
2534 return 1;
2536 /* For commutative ops, allow the other order. */
2537 return (commutative_tree_code (TREE_CODE (arg0))
2538 && operand_equal_p (TREE_OPERAND (arg0, 0),
2539 TREE_OPERAND (arg1, 1), flags)
2540 && operand_equal_p (TREE_OPERAND (arg0, 1),
2541 TREE_OPERAND (arg1, 0), flags));
2543 case tcc_reference:
2544 /* If either of the pointer (or reference) expressions we are
2545 dereferencing contain a side effect, these cannot be equal. */
2546 if (TREE_SIDE_EFFECTS (arg0)
2547 || TREE_SIDE_EFFECTS (arg1))
2548 return 0;
2550 switch (TREE_CODE (arg0))
2552 case INDIRECT_REF:
2553 case REALPART_EXPR:
2554 case IMAGPART_EXPR:
2555 return OP_SAME (0);
2557 case TARGET_MEM_REF:
2558 /* Require equal extra operands and then fall through to MEM_REF
2559 handling of the two common operands. */
2560 if (!OP_SAME_WITH_NULL (2)
2561 || !OP_SAME_WITH_NULL (3)
2562 || !OP_SAME_WITH_NULL (4))
2563 return 0;
2564 /* Fallthru. */
2565 case MEM_REF:
2566 /* Require equal access sizes, and similar pointer types.
2567 We can have incomplete types for array references of
2568 variable-sized arrays from the Fortran frontend
2569 though. */
2570 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2571 || (TYPE_SIZE (TREE_TYPE (arg0))
2572 && TYPE_SIZE (TREE_TYPE (arg1))
2573 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2574 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2575 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2576 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2577 && OP_SAME (0) && OP_SAME (1));
2579 case ARRAY_REF:
2580 case ARRAY_RANGE_REF:
2581 /* Operands 2 and 3 may be null.
2582 If the array index is constant, compare it by value first, since we
2583 may have different types but the same value here. */
2584 return (OP_SAME (0)
2585 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2586 TREE_OPERAND (arg1, 1))
2587 || OP_SAME (1))
2588 && OP_SAME_WITH_NULL (2)
2589 && OP_SAME_WITH_NULL (3));
2591 case COMPONENT_REF:
2592 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2593 may be NULL when we're called to compare MEM_EXPRs. */
2594 return OP_SAME_WITH_NULL (0)
2595 && OP_SAME (1)
2596 && OP_SAME_WITH_NULL (2);
2598 case BIT_FIELD_REF:
2599 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2601 default:
2602 return 0;
2605 case tcc_expression:
2606 switch (TREE_CODE (arg0))
2608 case ADDR_EXPR:
2609 case TRUTH_NOT_EXPR:
2610 return OP_SAME (0);
2612 case TRUTH_ANDIF_EXPR:
2613 case TRUTH_ORIF_EXPR:
2614 return OP_SAME (0) && OP_SAME (1);
2616 case FMA_EXPR:
2617 case WIDEN_MULT_PLUS_EXPR:
2618 case WIDEN_MULT_MINUS_EXPR:
2619 if (!OP_SAME (2))
2620 return 0;
2621 /* The multiplication operands are commutative. */
2622 /* FALLTHRU */
2624 case TRUTH_AND_EXPR:
2625 case TRUTH_OR_EXPR:
2626 case TRUTH_XOR_EXPR:
2627 if (OP_SAME (0) && OP_SAME (1))
2628 return 1;
2630 /* Otherwise take into account this is a commutative operation. */
2631 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2632 TREE_OPERAND (arg1, 1), flags)
2633 && operand_equal_p (TREE_OPERAND (arg0, 1),
2634 TREE_OPERAND (arg1, 0), flags));
2636 case COND_EXPR:
2637 case VEC_COND_EXPR:
2638 case DOT_PROD_EXPR:
2639 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2641 default:
2642 return 0;
2645 case tcc_vl_exp:
2646 switch (TREE_CODE (arg0))
2648 case CALL_EXPR:
2649 /* If the CALL_EXPRs call different functions, then they
2650 clearly cannot be equal. */
2651 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2652 flags))
2653 return 0;
2656 unsigned int cef = call_expr_flags (arg0);
2657 if (flags & OEP_PURE_SAME)
2658 cef &= ECF_CONST | ECF_PURE;
2659 else
2660 cef &= ECF_CONST;
2661 if (!cef)
2662 return 0;
2665 /* Now see if all the arguments are the same. */
2667 const_call_expr_arg_iterator iter0, iter1;
2668 const_tree a0, a1;
2669 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2670 a1 = first_const_call_expr_arg (arg1, &iter1);
2671 a0 && a1;
2672 a0 = next_const_call_expr_arg (&iter0),
2673 a1 = next_const_call_expr_arg (&iter1))
2674 if (! operand_equal_p (a0, a1, flags))
2675 return 0;
2677 /* If we get here and both argument lists are exhausted
2678 then the CALL_EXPRs are equal. */
2679 return ! (a0 || a1);
2681 default:
2682 return 0;
2685 case tcc_declaration:
2686 /* Consider __builtin_sqrt equal to sqrt. */
2687 return (TREE_CODE (arg0) == FUNCTION_DECL
2688 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2689 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2690 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2692 default:
2693 return 0;
2696 #undef OP_SAME
2697 #undef OP_SAME_WITH_NULL
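/* Illustrative sketch of the flag semantics (hypothetical trees X1 and
   X2, both built as "x + 1" with no side effects):

     operand_equal_p (x1, x2, 0);               returns nonzero
     operand_equal_p (x1, x2, OEP_ONLY_CONST);  returns 0

   The second call fails because under OEP_ONLY_CONST only constant
   operands may compare equal.  */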
2700 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2701 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2703 When in doubt, return 0. */
2705 static int
2706 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2708 int unsignedp1, unsignedpo;
2709 tree primarg0, primarg1, primother;
2710 unsigned int correct_width;
2712 if (operand_equal_p (arg0, arg1, 0))
2713 return 1;
2715 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2716 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2717 return 0;
2719 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2720 and see if the inner values are the same. This removes any
2721 signedness comparison, which doesn't matter here. */
2722 primarg0 = arg0, primarg1 = arg1;
2723 STRIP_NOPS (primarg0);
2724 STRIP_NOPS (primarg1);
2725 if (operand_equal_p (primarg0, primarg1, 0))
2726 return 1;
2728 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2729 actual comparison operand, ARG0.
2731 First throw away any conversions to wider types
2732 already present in the operands. */
2734 primarg1 = get_narrower (arg1, &unsignedp1);
2735 primother = get_narrower (other, &unsignedpo);
2737 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2738 if (unsignedp1 == unsignedpo
2739 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2740 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2742 tree type = TREE_TYPE (arg0);
2744 /* Make sure shorter operand is extended the right way
2745 to match the longer operand. */
2746 primarg1 = fold_convert (signed_or_unsigned_type_for
2747 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2749 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2750 return 1;
2753 return 0;
2756 /* See if ARG is an expression that is either a comparison or is performing
2757 arithmetic on comparisons. The comparisons must only be comparing
2758 two different values, which will be stored in *CVAL1 and *CVAL2; if
2759 they are nonzero it means that some operands have already been found.
2760 No variables may be used anywhere else in the expression except in the
2761 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2762 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2764 If this is true, return 1. Otherwise, return zero. */
2766 static int
2767 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2769 enum tree_code code = TREE_CODE (arg);
2770 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2772 /* We can handle some of the tcc_expression cases here. */
2773 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2774 tclass = tcc_unary;
2775 else if (tclass == tcc_expression
2776 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2777 || code == COMPOUND_EXPR))
2778 tclass = tcc_binary;
2780 else if (tclass == tcc_expression && code == SAVE_EXPR
2781 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2783 /* If we've already found a CVAL1 or CVAL2, this expression is
2784 too complex to handle. */
2785 if (*cval1 || *cval2)
2786 return 0;
2788 tclass = tcc_unary;
2789 *save_p = 1;
2792 switch (tclass)
2794 case tcc_unary:
2795 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2797 case tcc_binary:
2798 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2799 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2800 cval1, cval2, save_p));
2802 case tcc_constant:
2803 return 1;
2805 case tcc_expression:
2806 if (code == COND_EXPR)
2807 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2808 cval1, cval2, save_p)
2809 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2810 cval1, cval2, save_p)
2811 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2812 cval1, cval2, save_p));
2813 return 0;
2815 case tcc_comparison:
2816 /* First see if we can handle the first operand, then the second. For
2817 the second operand, we know *CVAL1 can't be zero. It must be that
2818 one side of the comparison is each of the values; test for the
2819 case where this isn't true by failing if the two operands
2820 are the same. */
2822 if (operand_equal_p (TREE_OPERAND (arg, 0),
2823 TREE_OPERAND (arg, 1), 0))
2824 return 0;
2826 if (*cval1 == 0)
2827 *cval1 = TREE_OPERAND (arg, 0);
2828 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2830 else if (*cval2 == 0)
2831 *cval2 = TREE_OPERAND (arg, 0);
2832 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2834 else
2835 return 0;
2837 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2839 else if (*cval2 == 0)
2840 *cval2 = TREE_OPERAND (arg, 1);
2841 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2843 else
2844 return 0;
2846 return 1;
2848 default:
2849 return 0;
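/* Example (comment only): for ARG = "(a < b) | (a == b)" the walk above
   records *CVAL1 = a and *CVAL2 = b and returns 1, whereas
   "(a < b) | (a == c)" returns 0 because a third distinct value C
   appears in the comparisons.  */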
2853 /* ARG is a tree that is known to contain just arithmetic operations and
2854 comparisons. Evaluate the operations in the tree substituting NEW0 for
2855 any occurrence of OLD0 as an operand of a comparison and likewise for
2856 NEW1 and OLD1. */
2858 static tree
2859 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2860 tree old1, tree new1)
2862 tree type = TREE_TYPE (arg);
2863 enum tree_code code = TREE_CODE (arg);
2864 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2866 /* We can handle some of the tcc_expression cases here. */
2867 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2868 tclass = tcc_unary;
2869 else if (tclass == tcc_expression
2870 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2871 tclass = tcc_binary;
2873 switch (tclass)
2875 case tcc_unary:
2876 return fold_build1_loc (loc, code, type,
2877 eval_subst (loc, TREE_OPERAND (arg, 0),
2878 old0, new0, old1, new1));
2880 case tcc_binary:
2881 return fold_build2_loc (loc, code, type,
2882 eval_subst (loc, TREE_OPERAND (arg, 0),
2883 old0, new0, old1, new1),
2884 eval_subst (loc, TREE_OPERAND (arg, 1),
2885 old0, new0, old1, new1));
2887 case tcc_expression:
2888 switch (code)
2890 case SAVE_EXPR:
2891 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2892 old1, new1);
2894 case COMPOUND_EXPR:
2895 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2896 old1, new1);
2898 case COND_EXPR:
2899 return fold_build3_loc (loc, code, type,
2900 eval_subst (loc, TREE_OPERAND (arg, 0),
2901 old0, new0, old1, new1),
2902 eval_subst (loc, TREE_OPERAND (arg, 1),
2903 old0, new0, old1, new1),
2904 eval_subst (loc, TREE_OPERAND (arg, 2),
2905 old0, new0, old1, new1));
2906 default:
2907 break;
2909 /* Fall through - ??? */
2911 case tcc_comparison:
2913 tree arg0 = TREE_OPERAND (arg, 0);
2914 tree arg1 = TREE_OPERAND (arg, 1);
2916 /* We need to check both for exact equality and tree equality. The
2917 former will be true if the operand has a side-effect. In that
2918 case, we know the operand occurred exactly once. */
2920 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2921 arg0 = new0;
2922 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2923 arg0 = new1;
2925 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2926 arg1 = new0;
2927 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2928 arg1 = new1;
2930 return fold_build2_loc (loc, code, type, arg0, arg1);
2933 default:
2934 return arg;
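/* Example (comment only): eval_subst (loc, arg, a, x, b, y) applied to
   ARG = "(a < b) && (b < c)" rebuilds it as "(x < y) && (y < c)"; only
   comparison operands matching OLD0 or OLD1 are replaced, everything
   else is refolded unchanged.  */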
2938 /* Return a tree for the case when the result of an expression is RESULT
2939 converted to TYPE and OMITTED was previously an operand of the expression
2940 but is now not needed (e.g., we folded OMITTED * 0).
2942 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2943 the conversion of RESULT to TYPE. */
2945 tree
2946 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2948 tree t = fold_convert_loc (loc, type, result);
2950 /* If the resulting operand is an empty statement, just return the omitted
2951 statement cast to void. */
2952 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2953 return build1_loc (loc, NOP_EXPR, void_type_node,
2954 fold_ignored_result (omitted));
2956 if (TREE_SIDE_EFFECTS (omitted))
2957 return build2_loc (loc, COMPOUND_EXPR, type,
2958 fold_ignored_result (omitted), t);
2960 return non_lvalue_loc (loc, t);
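/* Illustrative use (hypothetical trees): when "f () * 0" is folded and
   f () has side effects, the call cannot simply be dropped, so

     omit_one_operand_loc (loc, type, integer_zero_node, call);

   yields the COMPOUND_EXPR "(f (), 0)": the call is still evaluated,
   while the value of the whole expression is the constant zero.  */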
2963 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2965 static tree
2966 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2967 tree omitted)
2969 tree t = fold_convert_loc (loc, type, result);
2971 /* If the resulting operand is an empty statement, just return the omitted
2972 statement cast to void. */
2973 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2974 return build1_loc (loc, NOP_EXPR, void_type_node,
2975 fold_ignored_result (omitted));
2977 if (TREE_SIDE_EFFECTS (omitted))
2978 return build2_loc (loc, COMPOUND_EXPR, type,
2979 fold_ignored_result (omitted), t);
2981 return pedantic_non_lvalue_loc (loc, t);
2984 /* Return a tree for the case when the result of an expression is RESULT
2985 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2986 of the expression but are now not needed.
2988 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2989 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2990 evaluated before OMITTED2. Otherwise, if neither has side effects,
2991 just do the conversion of RESULT to TYPE. */
2993 tree
2994 omit_two_operands_loc (location_t loc, tree type, tree result,
2995 tree omitted1, tree omitted2)
2997 tree t = fold_convert_loc (loc, type, result);
2999 if (TREE_SIDE_EFFECTS (omitted2))
3000 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3001 if (TREE_SIDE_EFFECTS (omitted1))
3002 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3004 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3008 /* Return a simplified tree node for the truth-negation of ARG. This
3009 never alters ARG itself. We assume that ARG is an operation that
3010 returns a truth value (0 or 1).
3012 FIXME: one would think we would fold the result, but it causes
3013 problems with the dominator optimizer. */
3015 tree
3016 fold_truth_not_expr (location_t loc, tree arg)
3018 tree type = TREE_TYPE (arg);
3019 enum tree_code code = TREE_CODE (arg);
3020 location_t loc1, loc2;
3022 /* If this is a comparison, we can simply invert it, except for
3023 floating-point non-equality comparisons, in which case we just
3024 enclose a TRUTH_NOT_EXPR around what we have. */
3026 if (TREE_CODE_CLASS (code) == tcc_comparison)
3028 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3029 if (FLOAT_TYPE_P (op_type)
3030 && flag_trapping_math
3031 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3032 && code != NE_EXPR && code != EQ_EXPR)
3033 return NULL_TREE;
3035 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3036 if (code == ERROR_MARK)
3037 return NULL_TREE;
3039 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3040 TREE_OPERAND (arg, 1));
3043 switch (code)
3045 case INTEGER_CST:
3046 return constant_boolean_node (integer_zerop (arg), type);
3048 case TRUTH_AND_EXPR:
3049 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3050 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3051 return build2_loc (loc, TRUTH_OR_EXPR, type,
3052 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3053 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3055 case TRUTH_OR_EXPR:
3056 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3057 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3058 return build2_loc (loc, TRUTH_AND_EXPR, type,
3059 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3060 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3062 case TRUTH_XOR_EXPR:
3063 /* Here we can invert either operand. We invert the first operand
3064 unless the second operand is a TRUTH_NOT_EXPR in which case our
3065 result is the XOR of the first operand with the inside of the
3066 negation of the second operand. */
3068 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3069 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3070 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3071 else
3072 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3073 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3074 TREE_OPERAND (arg, 1));
3076 case TRUTH_ANDIF_EXPR:
3077 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3078 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3079 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3080 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3081 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3083 case TRUTH_ORIF_EXPR:
3084 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3085 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3086 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3087 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3088 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3090 case TRUTH_NOT_EXPR:
3091 return TREE_OPERAND (arg, 0);
3093 case COND_EXPR:
3095 tree arg1 = TREE_OPERAND (arg, 1);
3096 tree arg2 = TREE_OPERAND (arg, 2);
3098 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3099 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3101 /* A COND_EXPR may have a throw as one operand, which
3102 then has void type. Just leave void operands
3103 as they are. */
3104 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3105 VOID_TYPE_P (TREE_TYPE (arg1))
3106 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3107 VOID_TYPE_P (TREE_TYPE (arg2))
3108 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3111 case COMPOUND_EXPR:
3112 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3113 return build2_loc (loc, COMPOUND_EXPR, type,
3114 TREE_OPERAND (arg, 0),
3115 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3117 case NON_LVALUE_EXPR:
3118 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3119 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3121 CASE_CONVERT:
3122 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3123 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3125 /* ... fall through ... */
3127 case FLOAT_EXPR:
3128 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3129 return build1_loc (loc, TREE_CODE (arg), type,
3130 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3132 case BIT_AND_EXPR:
3133 if (!integer_onep (TREE_OPERAND (arg, 1)))
3134 return NULL_TREE;
3135 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3137 case SAVE_EXPR:
3138 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3140 case CLEANUP_POINT_EXPR:
3141 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3142 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3143 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3145 default:
3146 return NULL_TREE;
3150 /* Return a simplified tree node for the truth-negation of ARG. This
3151 never alters ARG itself. We assume that ARG is an operation that
3152 returns a truth value (0 or 1).
3154 FIXME: one would think we would fold the result, but it causes
3155 problems with the dominator optimizer. */
3157 tree
3158 invert_truthvalue_loc (location_t loc, tree arg)
3160 tree tem;
3162 if (TREE_CODE (arg) == ERROR_MARK)
3163 return arg;
3165 tem = fold_truth_not_expr (loc, arg);
3166 if (!tem)
3167 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3169 return tem;
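/* Example (comment only): fold_truth_not_expr applies De Morgan's laws,
   so inverting "a && b" yields "!a || !b" with both operands inverted
   recursively.  When nothing simpler applies, for instance for a bare
   SAVE_EXPR, the result is the argument wrapped in a TRUTH_NOT_EXPR,
   which is also the fallback invert_truthvalue_loc builds itself when
   fold_truth_not_expr returns NULL_TREE.  */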
3172 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3173 operands are another bit-wise operation with a common input. If so,
3174 distribute the bit operations to save an operation and possibly two if
3175 constants are involved. For example, convert
3176 (A | B) & (A | C) into A | (B & C)
3177 Further simplification will occur if B and C are constants.
3179 If this optimization cannot be done, 0 will be returned. */
3181 static tree
3182 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3183 tree arg0, tree arg1)
3185 tree common;
3186 tree left, right;
3188 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3189 || TREE_CODE (arg0) == code
3190 || (TREE_CODE (arg0) != BIT_AND_EXPR
3191 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3192 return 0;
3194 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3196 common = TREE_OPERAND (arg0, 0);
3197 left = TREE_OPERAND (arg0, 1);
3198 right = TREE_OPERAND (arg1, 1);
3200 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3202 common = TREE_OPERAND (arg0, 0);
3203 left = TREE_OPERAND (arg0, 1);
3204 right = TREE_OPERAND (arg1, 0);
3206 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3208 common = TREE_OPERAND (arg0, 1);
3209 left = TREE_OPERAND (arg0, 0);
3210 right = TREE_OPERAND (arg1, 1);
3212 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3214 common = TREE_OPERAND (arg0, 1);
3215 left = TREE_OPERAND (arg0, 0);
3216 right = TREE_OPERAND (arg1, 0);
3218 else
3219 return 0;
3221 common = fold_convert_loc (loc, type, common);
3222 left = fold_convert_loc (loc, type, left);
3223 right = fold_convert_loc (loc, type, right);
3224 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3225 fold_build2_loc (loc, code, type, left, right));
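/* Worked example (illustrative): in "(x | 4) & (x | 12)" the common
   operand is X, so the result is "x | (4 & 12)", which const-folds
   further to "x | 4": one bit operation instead of three.  */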
3228 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3229 with code CODE. This optimization is unsafe. */
3230 static tree
3231 distribute_real_division (location_t loc, enum tree_code code, tree type,
3232 tree arg0, tree arg1)
3234 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3235 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3237 /* (A / C) +- (B / C) -> (A +- B) / C. */
3238 if (mul0 == mul1
3239 && operand_equal_p (TREE_OPERAND (arg0, 1),
3240 TREE_OPERAND (arg1, 1), 0))
3241 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3242 fold_build2_loc (loc, code, type,
3243 TREE_OPERAND (arg0, 0),
3244 TREE_OPERAND (arg1, 0)),
3245 TREE_OPERAND (arg0, 1));
3247 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3248 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3249 TREE_OPERAND (arg1, 0), 0)
3250 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3251 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3253 REAL_VALUE_TYPE r0, r1;
3254 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3255 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3256 if (!mul0)
3257 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3258 if (!mul1)
3259 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3260 real_arithmetic (&r0, code, &r0, &r1);
3261 return fold_build2_loc (loc, MULT_EXPR, type,
3262 TREE_OPERAND (arg0, 0),
3263 build_real (type, r0));
3266 return NULL_TREE;
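/* Worked example (illustrative; the result can differ slightly from the
   original because 1/5.0 is inexact in binary, which is why this
   transformation is flagged unsafe): "a / 5.0 + a / 4.0" matches the
   second pattern and becomes "a * (1/5.0 + 1/4.0)", i.e. "a * 0.45".  */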
3269 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3270 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3272 static tree
3273 make_bit_field_ref (location_t loc, tree inner, tree type,
3274 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3276 tree result, bftype;
3278 if (bitpos == 0)
3280 tree size = TYPE_SIZE (TREE_TYPE (inner));
3281 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3282 || POINTER_TYPE_P (TREE_TYPE (inner)))
3283 && host_integerp (size, 0)
3284 && tree_low_cst (size, 0) == bitsize)
3285 return fold_convert_loc (loc, type, inner);
3288 bftype = type;
3289 if (TYPE_PRECISION (bftype) != bitsize
3290 || TYPE_UNSIGNED (bftype) == !unsignedp)
3291 bftype = build_nonstandard_integer_type (bitsize, 0);
3293 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3294 size_int (bitsize), bitsize_int (bitpos));
3296 if (bftype != type)
3297 result = fold_convert_loc (loc, type, result);
3299 return result;
3302 /* Optimize a bit-field compare.
3304 There are two cases: First is a compare against a constant and the
3305 second is a comparison of two items where the fields are at the same
3306 bit position relative to the start of a chunk (byte, halfword, word)
3307 large enough to contain it. In these cases we can avoid the shift
3308 implicit in bitfield extractions.
3310 For constants, we emit a compare of the shifted constant with the
3311 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3312 compared. For two fields at the same position, we do the ANDs with the
3313 similar mask and compare the result of the ANDs.
3315 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3316 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3317 are the left and right operands of the comparison, respectively.
3319 If the optimization described above can be done, we return the resulting
3320 tree. Otherwise we return zero. */
3322 static tree
3323 optimize_bit_field_compare (location_t loc, enum tree_code code,
3324 tree compare_type, tree lhs, tree rhs)
3326 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3327 tree type = TREE_TYPE (lhs);
3328 tree signed_type, unsigned_type;
3329 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3330 enum machine_mode lmode, rmode, nmode;
3331 int lunsignedp, runsignedp;
3332 int lvolatilep = 0, rvolatilep = 0;
3333 tree linner, rinner = NULL_TREE;
3334 tree mask;
3335 tree offset;
3337 /* In the strict volatile bitfields case, doing code changes here may prevent
3338 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3339 if (flag_strict_volatile_bitfields > 0)
3340 return 0;
3342 /* Get all the information about the extractions being done. If the bit size
3343 is the same as the size of the underlying object, we aren't doing an
3344 extraction at all and so can do nothing. We also don't want to
3345 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3346 then will no longer be able to replace it. */
3347 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3348 &lunsignedp, &lvolatilep, false);
3349 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3350 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3351 return 0;
3353 if (!const_p)
3355 /* If this is not a constant, we can only do something if bit positions,
3356 sizes, and signedness are the same. */
3357 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3358 &runsignedp, &rvolatilep, false);
3360 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3361 || lunsignedp != runsignedp || offset != 0
3362 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3363 return 0;
3366 /* See if we can find a mode to refer to this field. We should be able to,
3367 but fail if we can't. */
3368 if (lvolatilep
3369 && GET_MODE_BITSIZE (lmode) > 0
3370 && flag_strict_volatile_bitfields > 0)
3371 nmode = lmode;
3372 else
3373 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3374 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3375 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3376 TYPE_ALIGN (TREE_TYPE (rinner))),
3377 word_mode, lvolatilep || rvolatilep);
3378 if (nmode == VOIDmode)
3379 return 0;
3381 /* Set signed and unsigned types of the precision of this mode for the
3382 shifts below. */
3383 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3384 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3386 /* Compute the bit position and size for the new reference and our offset
3387 within it. If the new reference is the same size as the original, we
3388 won't optimize anything, so return zero. */
3389 nbitsize = GET_MODE_BITSIZE (nmode);
3390 nbitpos = lbitpos & ~ (nbitsize - 1);
3391 lbitpos -= nbitpos;
3392 if (nbitsize == lbitsize)
3393 return 0;
3395 if (BYTES_BIG_ENDIAN)
3396 lbitpos = nbitsize - lbitsize - lbitpos;
3398 /* Make the mask to be used against the extracted field. */
3399 mask = build_int_cst_type (unsigned_type, -1);
3400 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3401 mask = const_binop (RSHIFT_EXPR, mask,
3402 size_int (nbitsize - lbitsize - lbitpos));
3404 if (! const_p)
3405 /* If not comparing with constant, just rework the comparison
3406 and return. */
3407 return fold_build2_loc (loc, code, compare_type,
3408 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3409 make_bit_field_ref (loc, linner,
3410 unsigned_type,
3411 nbitsize, nbitpos,
3412 1),
3413 mask),
3414 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3415 make_bit_field_ref (loc, rinner,
3416 unsigned_type,
3417 nbitsize, nbitpos,
3418 1),
3419 mask));
3421 /* Otherwise, we are handling the constant case. See if the constant is too
3422 big for the field. Warn and return a tree for 0 (false) if so. We do
3423 this not only for its own sake, but to avoid having to test for this
3424 error case below. If we didn't, we might generate wrong code.
3426 For unsigned fields, the constant shifted right by the field length should
3427 be all zero. For signed fields, the high-order bits should agree with
3428 the sign bit. */
3430 if (lunsignedp)
3432 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3433 fold_convert_loc (loc,
3434 unsigned_type, rhs),
3435 size_int (lbitsize))))
3437 warning (0, "comparison is always %d due to width of bit-field",
3438 code == NE_EXPR);
3439 return constant_boolean_node (code == NE_EXPR, compare_type);
3442 else
3444 tree tem = const_binop (RSHIFT_EXPR,
3445 fold_convert_loc (loc, signed_type, rhs),
3446 size_int (lbitsize - 1));
3447 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3449 warning (0, "comparison is always %d due to width of bit-field",
3450 code == NE_EXPR);
3451 return constant_boolean_node (code == NE_EXPR, compare_type);
3455 /* Single-bit compares should always be against zero. */
3456 if (lbitsize == 1 && ! integer_zerop (rhs))
3458 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3459 rhs = build_int_cst (type, 0);
3462 /* Make a new bitfield reference, shift the constant over the
3463 appropriate number of bits and mask it with the computed mask
3464 (in case this was a signed field). If we changed it, make a new one. */
3465 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3466 if (lvolatilep)
3468 TREE_SIDE_EFFECTS (lhs) = 1;
3469 TREE_THIS_VOLATILE (lhs) = 1;
3472 rhs = const_binop (BIT_AND_EXPR,
3473 const_binop (LSHIFT_EXPR,
3474 fold_convert_loc (loc, unsigned_type, rhs),
3475 size_int (lbitpos)),
3476 mask);
3478 lhs = build2_loc (loc, code, compare_type,
3479 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3480 return lhs;
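/* Worked example (illustrative, little-endian, assuming get_best_mode
   selects a 32-bit mode): comparing a 3-bit field at bit position 2
   against the constant 5 loads the containing word W once and tests

     (W & (7 << 2)) == (5 << 2)

   The mask built above selects the field in place, so the shift that a
   plain bit-field extraction would need never gets emitted.  */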
3483 /* Subroutine for fold_truth_andor_1: decode a field reference.
3485 If EXP is a comparison reference, we return the innermost reference.
3487 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3488 set to the starting bit number.
3490 If the innermost field can be completely contained in a mode-sized
3491 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3493 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3494 otherwise it is not changed.
3496 *PUNSIGNEDP is set to the signedness of the field.
3498 *PMASK is set to the mask used. This is either contained in a
3499 BIT_AND_EXPR or derived from the width of the field.
3501 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3503 Return 0 if this is not a component reference or is one that we can't
3504 do anything with. */
3506 static tree
3507 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3508 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3509 int *punsignedp, int *pvolatilep,
3510 tree *pmask, tree *pand_mask)
3512 tree outer_type = 0;
3513 tree and_mask = 0;
3514 tree mask, inner, offset;
3515 tree unsigned_type;
3516 unsigned int precision;
3518 /* All the optimizations using this function assume integer fields.
3519 There are problems with FP fields since the type_for_size call
3520 below can fail for, e.g., XFmode. */
3521 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3522 return 0;
3524 /* We are interested in the bare arrangement of bits, so strip everything
3525 that doesn't affect the machine mode. However, record the type of the
3526 outermost expression if it may matter below. */
3527 if (CONVERT_EXPR_P (exp)
3528 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3529 outer_type = TREE_TYPE (exp);
3530 STRIP_NOPS (exp);
3532 if (TREE_CODE (exp) == BIT_AND_EXPR)
3534 and_mask = TREE_OPERAND (exp, 1);
3535 exp = TREE_OPERAND (exp, 0);
3536 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3537 if (TREE_CODE (and_mask) != INTEGER_CST)
3538 return 0;
3541 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3542 punsignedp, pvolatilep, false);
3543 if ((inner == exp && and_mask == 0)
3544 || *pbitsize < 0 || offset != 0
3545 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3546 return 0;
3548 /* If the number of bits in the reference is the same as the bitsize of
3549 the outer type, then the outer type gives the signedness. Otherwise
3550 (in case of a small bitfield) the signedness is unchanged. */
3551 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3552 *punsignedp = TYPE_UNSIGNED (outer_type);
3554 /* Compute the mask to access the bitfield. */
3555 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3556 precision = TYPE_PRECISION (unsigned_type);
3558 mask = build_int_cst_type (unsigned_type, -1);
3560 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3561 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3563 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3564 if (and_mask != 0)
3565 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3566 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3568 *pmask = mask;
3569 *pand_mask = and_mask;
3570 return inner;
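/* Example (comment only): given a reference to a bit-field B combined
   with a mask, say "b & 3", the BIT_AND constant 3 is stored in
   *PAND_MASK, *PMASK receives that constant ANDed with the all-ones
   mask of the field's width, and the containing object is returned,
   with *PBITSIZE and *PBITPOS describing where the field lives.  */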
3573 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3574 bit positions. */
3576 static int
3577 all_ones_mask_p (const_tree mask, int size)
3579 tree type = TREE_TYPE (mask);
3580 unsigned int precision = TYPE_PRECISION (type);
3581 tree tmask;
3583 tmask = build_int_cst_type (signed_type_for (type), -1);
3585 return
3586 tree_int_cst_equal (mask,
3587 const_binop (RSHIFT_EXPR,
3588 const_binop (LSHIFT_EXPR, tmask,
3589 size_int (precision - size)),
3590 size_int (precision - size)));
3593 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3594 represents the sign bit of EXP's type. If EXP represents a sign
3595 or zero extension, also test VAL against the unextended type.
3596 The return value is the (sub)expression whose sign bit is VAL,
3597 or NULL_TREE otherwise. */
3599 static tree
3600 sign_bit_p (tree exp, const_tree val)
3602 unsigned HOST_WIDE_INT mask_lo, lo;
3603 HOST_WIDE_INT mask_hi, hi;
3604 int width;
3605 tree t;
3607 /* Tree EXP must have an integral type. */
3608 t = TREE_TYPE (exp);
3609 if (! INTEGRAL_TYPE_P (t))
3610 return NULL_TREE;
3612 /* Tree VAL must be an integer constant. */
3613 if (TREE_CODE (val) != INTEGER_CST
3614 || TREE_OVERFLOW (val))
3615 return NULL_TREE;
3617 width = TYPE_PRECISION (t);
3618 if (width > HOST_BITS_PER_WIDE_INT)
3620 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3621 lo = 0;
3623 mask_hi = ((unsigned HOST_WIDE_INT) -1
3624 >> (HOST_BITS_PER_DOUBLE_INT - width));
3625 mask_lo = -1;
3627 else
3629 hi = 0;
3630 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3632 mask_hi = 0;
3633 mask_lo = ((unsigned HOST_WIDE_INT) -1
3634 >> (HOST_BITS_PER_WIDE_INT - width));
3637 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3638 treat VAL as if it were unsigned. */
3639 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3640 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3641 return exp;
3643 /* Handle extension from a narrower type. */
3644 if (TREE_CODE (exp) == NOP_EXPR
3645 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3646 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3648 return NULL_TREE;
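/* Example (comment only): for a 16-bit EXP the sign bit is 1 << 15, so
   a VAL whose low-order 16 bits are 0x8000 makes sign_bit_p return EXP;
   any other VAL yields NULL_TREE.  For widths beyond
   HOST_BITS_PER_WIDE_INT the same test is split across the HI and LO
   halves of the constant, using MASK_HI and MASK_LO to ignore bits
   beyond the precision of EXP's type.  */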
3651 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3652 to be evaluated unconditionally. */
3654 static int
3655 simple_operand_p (const_tree exp)
3657 /* Strip any conversions that don't change the machine mode. */
3658 STRIP_NOPS (exp);
3660 return (CONSTANT_CLASS_P (exp)
3661 || TREE_CODE (exp) == SSA_NAME
3662 || (DECL_P (exp)
3663 && ! TREE_ADDRESSABLE (exp)
3664 && ! TREE_THIS_VOLATILE (exp)
3665 && ! DECL_NONLOCAL (exp)
3666 /* Don't regard global variables as simple. They may be
3667 allocated in ways unknown to the compiler (shared memory,
3668 #pragma weak, etc). */
3669 && ! TREE_PUBLIC (exp)
3670 && ! DECL_EXTERNAL (exp)
3671 /* Loading a static variable is unduly expensive, but global
3672 registers aren't expensive. */
3673 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3676 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3677 to be evaluated unconditionally.
3678 In addition to simple_operand_p, we assume that comparisons, conversions,
3679 and logic-not operations are simple, if their operands are simple, too. */
3681 static bool
3682 simple_operand_p_2 (tree exp)
3684 enum tree_code code;
3686 if (TREE_SIDE_EFFECTS (exp)
3687 || tree_could_trap_p (exp))
3688 return false;
3690 while (CONVERT_EXPR_P (exp))
3691 exp = TREE_OPERAND (exp, 0);
3693 code = TREE_CODE (exp);
3695 if (TREE_CODE_CLASS (code) == tcc_comparison)
3696 return (simple_operand_p (TREE_OPERAND (exp, 0))
3697 && simple_operand_p (TREE_OPERAND (exp, 1)));
3699 if (code == TRUTH_NOT_EXPR)
3700 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3702 return simple_operand_p (exp);
3706 /* The following functions are subroutines to fold_range_test and allow it to
3707 try to change a logical combination of comparisons into a range test.
3709 For example, both
3710 X == 2 || X == 3 || X == 4 || X == 5
3712 X >= 2 && X <= 5
3713 are converted to
3714 (unsigned) (X - 2) <= 3
3716 We describe each set of comparisons as being either inside or outside
3717 a range, using a variable named like IN_P, and then describe the
3718 range with a lower and upper bound. If one of the bounds is omitted,
3719 it represents either the highest or lowest value of the type.
3721 In the comments below, we represent a range by two numbers in brackets
3722 preceded by a "+" to designate being inside that range, or a "-" to
3723 designate being outside that range, so the condition can be inverted by
3724 flipping the prefix. An omitted bound is represented by a "-". For
3725 example, "- [-, 10]" means being outside the range starting at the lowest
3726 possible value and ending at 10, in other words, being greater than 10.
3727 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3728 always false.
3730 We set up things so that the missing bounds are handled in a consistent
3731 manner so neither a missing bound nor "true" and "false" need to be
3732 handled using a special case. */
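/* To make the notation concrete (illustrative bounds): X > 10 is
   described as - [-, 10], outside the range from the lowest value to
   10, and equivalently as + [11, -].  Inverting a test merely flips
   the prefix, so the inverse of - [-, 10] is + [-, 10], i.e. X <= 10.  */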
3734 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3735 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3736 and UPPER1_P are nonzero if the respective argument is an upper bound
3737 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3738 must be specified for a comparison. ARG1 will be converted to ARG0's
3739 type if both are specified. */
3741 static tree
3742 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3743 tree arg1, int upper1_p)
3745 tree tem;
3746 int result;
3747 int sgn0, sgn1;
3749 /* If neither arg represents infinity, do the normal operation.
3750 Else, if not a comparison, return infinity. Else handle the special
3751 comparison rules. Note that most of the cases below won't occur, but
3752 are handled for consistency. */
3754 if (arg0 != 0 && arg1 != 0)
3756 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3757 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3758 STRIP_NOPS (tem);
3759 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3762 if (TREE_CODE_CLASS (code) != tcc_comparison)
3763 return 0;
3765 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3766 for neither. In real maths, we cannot assume open ended ranges are
3767 the same. But, this is computer arithmetic, where numbers are finite.
3768 We can therefore stand in for the missing bound of any unbounded
3769 range with a value Z greater than any representable number. This
3770 permits us to treat unbounded ranges as equal. */
3771 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3772 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3773 switch (code)
3775 case EQ_EXPR:
3776 result = sgn0 == sgn1;
3777 break;
3778 case NE_EXPR:
3779 result = sgn0 != sgn1;
3780 break;
3781 case LT_EXPR:
3782 result = sgn0 < sgn1;
3783 break;
3784 case LE_EXPR:
3785 result = sgn0 <= sgn1;
3786 break;
3787 case GT_EXPR:
3788 result = sgn0 > sgn1;
3789 break;
3790 case GE_EXPR:
3791 result = sgn0 >= sgn1;
3792 break;
3793 default:
3794 gcc_unreachable ();
3797 return constant_boolean_node (result, type);
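/* A sketch of the infinity handling (hypothetical arguments): in

     range_binop (LT_EXPR, integer_type_node, low, 0, NULL_TREE, 1)

   with LOW a finite INTEGER_CST, SGN0 is 0 and SGN1 is 1, so the
   result is the constant 1: every finite value compares below an
   omitted upper bound, the Z described above.  */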
3800 /* Helper routine for make_range. Perform one step for it, return
3801 new expression if the loop should continue or NULL_TREE if it should
3802 stop. */
3804 tree
3805 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3806 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3807 bool *strict_overflow_p)
3809 tree arg0_type = TREE_TYPE (arg0);
3810 tree n_low, n_high, low = *p_low, high = *p_high;
3811 int in_p = *p_in_p, n_in_p;
3813 switch (code)
3815 case TRUTH_NOT_EXPR:
3816 /* We can only do something if the range is testing for zero. */
3817 if (low == NULL_TREE || high == NULL_TREE
3818 || ! integer_zerop (low) || ! integer_zerop (high))
3819 return NULL_TREE;
3820 *p_in_p = ! in_p;
3821 return arg0;
3823 case EQ_EXPR: case NE_EXPR:
3824 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3825 /* We can only do something if the range is testing for zero
3826 and if the second operand is an integer constant. Note that
3827 saying something is "in" the range we make is done by
3828 complementing IN_P, since it is set in the initial case of
3829 being not equal to zero; "out" means leaving it alone. */
3830 if (low == NULL_TREE || high == NULL_TREE
3831 || ! integer_zerop (low) || ! integer_zerop (high)
3832 || TREE_CODE (arg1) != INTEGER_CST)
3833 return NULL_TREE;
3835 switch (code)
3837 case NE_EXPR: /* - [c, c] */
3838 low = high = arg1;
3839 break;
3840 case EQ_EXPR: /* + [c, c] */
3841 in_p = ! in_p, low = high = arg1;
3842 break;
3843 case GT_EXPR: /* - [-, c] */
3844 low = 0, high = arg1;
3845 break;
3846 case GE_EXPR: /* + [c, -] */
3847 in_p = ! in_p, low = arg1, high = 0;
3848 break;
3849 case LT_EXPR: /* - [c, -] */
3850 low = arg1, high = 0;
3851 break;
3852 case LE_EXPR: /* + [-, c] */
3853 in_p = ! in_p, low = 0, high = arg1;
3854 break;
3855 default:
3856 gcc_unreachable ();
3859 /* If this is an unsigned comparison, we also know that EXP is
3860 greater than or equal to zero. We base the range tests we make
3861 on that fact, so we record it here so we can parse existing
3862 range tests. We test arg0_type since often the return type
3863 of, e.g. EQ_EXPR, is boolean. */
3864 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3866 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3867 in_p, low, high, 1,
3868 build_int_cst (arg0_type, 0),
3869 NULL_TREE))
3870 return NULL_TREE;
3872 in_p = n_in_p, low = n_low, high = n_high;
3874 /* If the high bound is missing, but we have a nonzero low
3875 bound, reverse the range so it goes from zero to the low bound
3876 minus 1. */
3877 if (high == 0 && low && ! integer_zerop (low))
3879 in_p = ! in_p;
3880 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3881 integer_one_node, 0);
3882 low = build_int_cst (arg0_type, 0);
3886 *p_low = low;
3887 *p_high = high;
3888 *p_in_p = in_p;
3889 return arg0;
3891 case NEGATE_EXPR:
3892 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3893 low and high are non-NULL, then normalize will DTRT. */
3894 if (!TYPE_UNSIGNED (arg0_type)
3895 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3897 if (low == NULL_TREE)
3898 low = TYPE_MIN_VALUE (arg0_type);
3899 if (high == NULL_TREE)
3900 high = TYPE_MAX_VALUE (arg0_type);
3903 /* (-x) IN [a,b] -> x in [-b, -a] */
3904 n_low = range_binop (MINUS_EXPR, exp_type,
3905 build_int_cst (exp_type, 0),
3906 0, high, 1);
3907 n_high = range_binop (MINUS_EXPR, exp_type,
3908 build_int_cst (exp_type, 0),
3909 0, low, 0);
3910 if (n_high != 0 && TREE_OVERFLOW (n_high))
3911 return NULL_TREE;
3912 goto normalize;
3914 case BIT_NOT_EXPR:
3915 /* ~ X -> -X - 1 */
3916 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3917 build_int_cst (exp_type, 1));
3919 case PLUS_EXPR:
3920 case MINUS_EXPR:
3921 if (TREE_CODE (arg1) != INTEGER_CST)
3922 return NULL_TREE;
3924 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3925 move a constant to the other side. */
3926 if (!TYPE_UNSIGNED (arg0_type)
3927 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3928 return NULL_TREE;
3930 /* If EXP is signed, any overflow in the computation is undefined,
3931 so we don't worry about it so long as our computations on
3932 the bounds don't overflow. For unsigned, overflow is defined
3933 and this is exactly the right thing. */
3934 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3935 arg0_type, low, 0, arg1, 0);
3936 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3937 arg0_type, high, 1, arg1, 0);
3938 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3939 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3940 return NULL_TREE;
3942 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3943 *strict_overflow_p = true;
3945 normalize:
3946 /* Check for an unsigned range which has wrapped around the maximum
3947 value thus making n_high < n_low, and normalize it. */
3948 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3950 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3951 integer_one_node, 0);
3952 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3953 integer_one_node, 0);
3955 /* If the range is of the form +/- [ x+1, x ], we won't
3956 be able to normalize it. But then, it represents the
3957 whole range or the empty set, so make it
3958 +/- [ -, - ]. */
3959 if (tree_int_cst_equal (n_low, low)
3960 && tree_int_cst_equal (n_high, high))
3961 low = high = 0;
3962 else
3963 in_p = ! in_p;
3965 else
3966 low = n_low, high = n_high;
3968 *p_low = low;
3969 *p_high = high;
3970 *p_in_p = in_p;
3971 return arg0;
3973 CASE_CONVERT:
3974 case NON_LVALUE_EXPR:
3975 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3976 return NULL_TREE;
3978 if (! INTEGRAL_TYPE_P (arg0_type)
3979 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3980 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3981 return NULL_TREE;
3983 n_low = low, n_high = high;
3985 if (n_low != 0)
3986 n_low = fold_convert_loc (loc, arg0_type, n_low);
3988 if (n_high != 0)
3989 n_high = fold_convert_loc (loc, arg0_type, n_high);
3991 /* If we're converting arg0 from an unsigned type to exp's
3992 signed type, we will be doing the comparison as unsigned.
3993 The tests above have already verified that LOW and HIGH
3994 are both positive.
3996 So we have to ensure that we will handle large unsigned
3997 values the same way that the current signed bounds treat
3998 negative values. */
4000 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4002 tree high_positive;
4003 tree equiv_type;
4004 /* For fixed-point modes, we need to pass the saturating flag
4005 as the 2nd parameter. */
4006 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4007 equiv_type
4008 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4009 TYPE_SATURATING (arg0_type));
4010 else
4011 equiv_type
4012 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4014 /* A range without an upper bound is, naturally, unbounded.
4015 Since convert would have cropped a very large value, use
4016 the max value for the destination type. */
4017 high_positive
4018 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4019 : TYPE_MAX_VALUE (arg0_type);
4021 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4022 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4023 fold_convert_loc (loc, arg0_type,
4024 high_positive),
4025 build_int_cst (arg0_type, 1));
4027 /* If the low bound is specified, "and" the range with the
4028 range for which the original unsigned value will be
4029 positive. */
4030 if (low != 0)
4032 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4033 1, fold_convert_loc (loc, arg0_type,
4034 integer_zero_node),
4035 high_positive))
4036 return NULL_TREE;
4038 in_p = (n_in_p == in_p);
4040 else
4042 /* Otherwise, "or" the range with the range of the input
4043 that will be interpreted as negative. */
4044 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4045 1, fold_convert_loc (loc, arg0_type,
4046 integer_zero_node),
4047 high_positive))
4048 return NULL_TREE;
4050 in_p = (in_p != n_in_p);
4054 *p_low = n_low;
4055 *p_high = n_high;
4056 *p_in_p = in_p;
4057 return arg0;
4059 default:
4060 return NULL_TREE;
4064 /* Given EXP, a logical expression, set the range it is testing into
4065 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4066 actually being tested. *PLOW and *PHIGH will be made of the same
4067 type as the returned expression. If EXP is not a comparison, we
4068 will most likely not be returning a useful value and range. Set
4069 *STRICT_OVERFLOW_P to true if the return value is only valid
4070 because signed overflow is undefined; otherwise, do not change
4071 *STRICT_OVERFLOW_P. */
4073 tree
4074 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4075 bool *strict_overflow_p)
4077 enum tree_code code;
4078 tree arg0, arg1 = NULL_TREE;
4079 tree exp_type, nexp;
4080 int in_p;
4081 tree low, high;
4082 location_t loc = EXPR_LOCATION (exp);
4084 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4085 and see if we can refine the range. Some of the cases below may not
4086 happen, but it doesn't seem worth worrying about this. We "continue"
4087 the outer loop when we've changed something; otherwise we "break"
4088 the switch, which will "break" the while. */
4090 in_p = 0;
4091 low = high = build_int_cst (TREE_TYPE (exp), 0);
4093 while (1)
4095 code = TREE_CODE (exp);
4096 exp_type = TREE_TYPE (exp);
4097 arg0 = NULL_TREE;
4099 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4101 if (TREE_OPERAND_LENGTH (exp) > 0)
4102 arg0 = TREE_OPERAND (exp, 0);
4103 if (TREE_CODE_CLASS (code) == tcc_binary
4104 || TREE_CODE_CLASS (code) == tcc_comparison
4105 || (TREE_CODE_CLASS (code) == tcc_expression
4106 && TREE_OPERAND_LENGTH (exp) > 1))
4107 arg1 = TREE_OPERAND (exp, 1);
4109 if (arg0 == NULL_TREE)
4110 break;
4112 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4113 &high, &in_p, strict_overflow_p);
4114 if (nexp == NULL_TREE)
4115 break;
4116 exp = nexp;
4119 /* If EXP is a constant, we can evaluate whether this is true or false. */
4120 if (TREE_CODE (exp) == INTEGER_CST)
4122 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4123 exp, 0, low, 0))
4124 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4125 exp, 1, high, 1)));
4126 low = high = 0;
4127 exp = 0;
4130 *pin_p = in_p, *plow = low, *phigh = high;
4131 return exp;
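/* A short sketch (illustrative expression): for EXP = x > 10 with X
   a signed int, the initial range is - [0, 0], one GT_EXPR step
   rewrites it to - [-, 10], and the function returns X with
   *PIN_P == 0, *PLOW == NULL_TREE and *PHIGH == 10.  */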
4134 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4135 type, TYPE, return an expression to test if EXP is in (or out of, depending
4136 on IN_P) the range. Return 0 if the test couldn't be created. */
4138 tree
4139 build_range_check (location_t loc, tree type, tree exp, int in_p,
4140 tree low, tree high)
4142 tree etype = TREE_TYPE (exp), value;
4144 #ifdef HAVE_canonicalize_funcptr_for_compare
4145 /* Disable this optimization for function pointer expressions
4146 on targets that require function pointer canonicalization. */
4147 if (HAVE_canonicalize_funcptr_for_compare
4148 && TREE_CODE (etype) == POINTER_TYPE
4149 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4150 return NULL_TREE;
4151 #endif
4153 if (! in_p)
4155 value = build_range_check (loc, type, exp, 1, low, high);
4156 if (value != 0)
4157 return invert_truthvalue_loc (loc, value);
4159 return 0;
4162 if (low == 0 && high == 0)
4163 return build_int_cst (type, 1);
4165 if (low == 0)
4166 return fold_build2_loc (loc, LE_EXPR, type, exp,
4167 fold_convert_loc (loc, etype, high));
4169 if (high == 0)
4170 return fold_build2_loc (loc, GE_EXPR, type, exp,
4171 fold_convert_loc (loc, etype, low));
4173 if (operand_equal_p (low, high, 0))
4174 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4175 fold_convert_loc (loc, etype, low));
4177 if (integer_zerop (low))
4179 if (! TYPE_UNSIGNED (etype))
4181 etype = unsigned_type_for (etype);
4182 high = fold_convert_loc (loc, etype, high);
4183 exp = fold_convert_loc (loc, etype, exp);
4185 return build_range_check (loc, type, exp, 1, 0, high);
4188 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4189 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4191 unsigned HOST_WIDE_INT lo;
4192 HOST_WIDE_INT hi;
4193 int prec;
4195 prec = TYPE_PRECISION (etype);
4196 if (prec <= HOST_BITS_PER_WIDE_INT)
4198 hi = 0;
4199 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4201 else
4203 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4204 lo = (unsigned HOST_WIDE_INT) -1;
4207 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4209 if (TYPE_UNSIGNED (etype))
4211 tree signed_etype = signed_type_for (etype);
4212 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4213 etype
4214 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4215 else
4216 etype = signed_etype;
4217 exp = fold_convert_loc (loc, etype, exp);
4219 return fold_build2_loc (loc, GT_EXPR, type, exp,
4220 build_int_cst (etype, 0));
4224 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4225 This requires wrap-around arithmetic for the type of the expression.
4226 First make sure that arithmetic in this type is valid, then make sure
4227 that it wraps around. */
4228 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4229 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4230 TYPE_UNSIGNED (etype));
4232 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4234 tree utype, minv, maxv;
4236 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4237 for the type in question, as we rely on this here. */
4238 utype = unsigned_type_for (etype);
4239 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4240 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4241 integer_one_node, 1);
4242 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4244 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4245 minv, 1, maxv, 1)))
4246 etype = utype;
4247 else
4248 return 0;
4251 high = fold_convert_loc (loc, etype, high);
4252 low = fold_convert_loc (loc, etype, low);
4253 exp = fold_convert_loc (loc, etype, exp);
4255 value = const_binop (MINUS_EXPR, high, low);
4258 if (POINTER_TYPE_P (etype))
4260 if (value != 0 && !TREE_OVERFLOW (value))
4262 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4263 return build_range_check (loc, type,
4264 fold_build_pointer_plus_loc (loc, exp, low),
4265 1, build_int_cst (etype, 0), value);
4267 return 0;
4270 if (value != 0 && !TREE_OVERFLOW (value))
4271 return build_range_check (loc, type,
4272 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4273 1, build_int_cst (etype, 0), value);
4275 return 0;
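/* End-to-end sketch, assuming an ASCII execution character set: a
   check of CH against + ['0', '9'] reaches the final MINUS_EXPR path
   with VALUE == 9, recurses as CH - '0' in + [0, 9], and the
   integer_zerop (low) case then yields one unsigned comparison,
   essentially

     (unsigned) (ch - '0') <= 9  */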
4278 /* Return the predecessor of VAL in its type, handling the infinite case. */
4280 static tree
4281 range_predecessor (tree val)
4283 tree type = TREE_TYPE (val);
4285 if (INTEGRAL_TYPE_P (type)
4286 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4287 return 0;
4288 else
4289 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4292 /* Return the successor of VAL in its type, handling the infinite case. */
4294 static tree
4295 range_successor (tree val)
4297 tree type = TREE_TYPE (val);
4299 if (INTEGRAL_TYPE_P (type)
4300 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4301 return 0;
4302 else
4303 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4306 /* Given two ranges, see if we can merge them into one. Return 1 if we
4307 can, 0 if we can't. Set the output range into the specified parameters. */
4309 bool
4310 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4311 tree high0, int in1_p, tree low1, tree high1)
4313 int no_overlap;
4314 int subset;
4315 int temp;
4316 tree tem;
4317 int in_p;
4318 tree low, high;
4319 int lowequal = ((low0 == 0 && low1 == 0)
4320 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4321 low0, 0, low1, 0)));
4322 int highequal = ((high0 == 0 && high1 == 0)
4323 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4324 high0, 1, high1, 1)));
4326 /* Make range 0 be the range that starts first, or ends last if they
4327 start at the same value. Swap them if it isn't. */
4328 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4329 low0, 0, low1, 0))
4330 || (lowequal
4331 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4332 high1, 1, high0, 1))))
4334 temp = in0_p, in0_p = in1_p, in1_p = temp;
4335 tem = low0, low0 = low1, low1 = tem;
4336 tem = high0, high0 = high1, high1 = tem;
4339 /* Now flag two cases, whether the ranges are disjoint or whether the
4340 second range is totally subsumed in the first. Note that the tests
4341 below are simplified by the ones above. */
4342 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4343 high0, 1, low1, 0));
4344 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4345 high1, 1, high0, 1));
4347 /* We now have four cases, depending on whether we are including or
4348 excluding the two ranges. */
4349 if (in0_p && in1_p)
4351 /* If they don't overlap, the result is false. If the second range
4352 is a subset it is the result. Otherwise, the range is from the start
4353 of the second to the end of the first. */
4354 if (no_overlap)
4355 in_p = 0, low = high = 0;
4356 else if (subset)
4357 in_p = 1, low = low1, high = high1;
4358 else
4359 in_p = 1, low = low1, high = high0;
4362 else if (in0_p && ! in1_p)
4364 /* If they don't overlap, the result is the first range. If they are
4365 equal, the result is false. If the second range is a subset of the
4366 first, and the ranges begin at the same place, we go from just after
4367 the end of the second range to the end of the first. If the second
4368 range is not a subset of the first, or if it is a subset and both
4369 ranges end at the same place, the range starts at the start of the
4370 first range and ends just before the second range.
4371 Otherwise, we can't describe this as a single range. */
4372 if (no_overlap)
4373 in_p = 1, low = low0, high = high0;
4374 else if (lowequal && highequal)
4375 in_p = 0, low = high = 0;
4376 else if (subset && lowequal)
4378 low = range_successor (high1);
4379 high = high0;
4380 in_p = 1;
4381 if (low == 0)
4383 /* We are in the weird situation where high0 > high1 but
4384 high1 has no successor. Punt. */
4385 return 0;
4388 else if (! subset || highequal)
4390 low = low0;
4391 high = range_predecessor (low1);
4392 in_p = 1;
4393 if (high == 0)
4395 /* low0 < low1 but low1 has no predecessor. Punt. */
4396 return 0;
4399 else
4400 return 0;
4403 else if (! in0_p && in1_p)
4405 /* If they don't overlap, the result is the second range. If the second
4406 is a subset of the first, the result is false. Otherwise,
4407 the range starts just after the first range and ends at the
4408 end of the second. */
4409 if (no_overlap)
4410 in_p = 1, low = low1, high = high1;
4411 else if (subset || highequal)
4412 in_p = 0, low = high = 0;
4413 else
4415 low = range_successor (high0);
4416 high = high1;
4417 in_p = 1;
4418 if (low == 0)
4420 /* high1 > high0 but high0 has no successor. Punt. */
4421 return 0;
4426 else
4428 /* The case where we are excluding both ranges. Here the complex case
4429 is if they don't overlap. In that case, the only time we have a
4430 range is if they are adjacent. If the second is a subset of the
4431 first, the result is the first. Otherwise, the range to exclude
4432 starts at the beginning of the first range and ends at the end of the
4433 second. */
4434 if (no_overlap)
4436 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4437 range_successor (high0),
4438 1, low1, 0)))
4439 in_p = 0, low = low0, high = high1;
4440 else
4442 /* Canonicalize - [min, x] into - [-, x]. */
4443 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4444 switch (TREE_CODE (TREE_TYPE (low0)))
4446 case ENUMERAL_TYPE:
4447 if (TYPE_PRECISION (TREE_TYPE (low0))
4448 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4449 break;
4450 /* FALLTHROUGH */
4451 case INTEGER_TYPE:
4452 if (tree_int_cst_equal (low0,
4453 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4454 low0 = 0;
4455 break;
4456 case POINTER_TYPE:
4457 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4458 && integer_zerop (low0))
4459 low0 = 0;
4460 break;
4461 default:
4462 break;
4465 /* Canonicalize - [x, max] into - [x, -]. */
4466 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4467 switch (TREE_CODE (TREE_TYPE (high1)))
4469 case ENUMERAL_TYPE:
4470 if (TYPE_PRECISION (TREE_TYPE (high1))
4471 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4472 break;
4473 /* FALLTHROUGH */
4474 case INTEGER_TYPE:
4475 if (tree_int_cst_equal (high1,
4476 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4477 high1 = 0;
4478 break;
4479 case POINTER_TYPE:
4480 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4481 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4482 high1, 1,
4483 integer_one_node, 1)))
4484 high1 = 0;
4485 break;
4486 default:
4487 break;
4490 /* The ranges might be also adjacent between the maximum and
4491 minimum values of the given type. For
4492 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4493 return + [x + 1, y - 1]. */
4494 if (low0 == 0 && high1 == 0)
4496 low = range_successor (high0);
4497 high = range_predecessor (low1);
4498 if (low == 0 || high == 0)
4499 return 0;
4501 in_p = 1;
4503 else
4504 return 0;
4507 else if (subset)
4508 in_p = 0, low = low0, high = high0;
4509 else
4510 in_p = 0, low = low0, high = high1;
4513 *pin_p = in_p, *plow = low, *phigh = high;
4514 return 1;
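/* A worked instance (illustrative bounds): merging + [2, 5] with
   + [4, 9].  Range 0 already starts first, NO_OVERLAP is false since
   5 >= 4, SUBSET is false since 9 > 5, so the in0_p && in1_p case
   above produces + [4, 5], the intersection of the two ranges.  */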
4518 /* Subroutine of fold, looking inside expressions of the form
4519 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4520 of the COND_EXPR. This function is being used also to optimize
4521 A op B ? C : A, by reversing the comparison first.
4523 Return a folded expression whose code is not a COND_EXPR
4524 anymore, or NULL_TREE if no folding opportunity is found. */
4526 static tree
4527 fold_cond_expr_with_comparison (location_t loc, tree type,
4528 tree arg0, tree arg1, tree arg2)
4530 enum tree_code comp_code = TREE_CODE (arg0);
4531 tree arg00 = TREE_OPERAND (arg0, 0);
4532 tree arg01 = TREE_OPERAND (arg0, 1);
4533 tree arg1_type = TREE_TYPE (arg1);
4534 tree tem;
4536 STRIP_NOPS (arg1);
4537 STRIP_NOPS (arg2);
4539 /* If we have A op 0 ? A : -A, consider applying the following
4540 transformations:
4542 A == 0? A : -A same as -A
4543 A != 0? A : -A same as A
4544 A >= 0? A : -A same as abs (A)
4545 A > 0? A : -A same as abs (A)
4546 A <= 0? A : -A same as -abs (A)
4547 A < 0? A : -A same as -abs (A)
4549 None of these transformations work for modes with signed
4550 zeros. If A is +/-0, the first two transformations will
4551 change the sign of the result (from +0 to -0, or vice
4552 versa). The last four will fix the sign of the result,
4553 even though the original expressions could be positive or
4554 negative, depending on the sign of A.
4556 Note that all these transformations are correct if A is
4557 NaN, since the two alternatives (A and -A) are also NaNs. */
4558 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4559 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4560 ? real_zerop (arg01)
4561 : integer_zerop (arg01))
4562 && ((TREE_CODE (arg2) == NEGATE_EXPR
4563 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4564 /* In the case that A is of the form X-Y, '-A' (arg2) may
4565 have already been folded to Y-X, check for that. */
4566 || (TREE_CODE (arg1) == MINUS_EXPR
4567 && TREE_CODE (arg2) == MINUS_EXPR
4568 && operand_equal_p (TREE_OPERAND (arg1, 0),
4569 TREE_OPERAND (arg2, 1), 0)
4570 && operand_equal_p (TREE_OPERAND (arg1, 1),
4571 TREE_OPERAND (arg2, 0), 0))))
4572 switch (comp_code)
4574 case EQ_EXPR:
4575 case UNEQ_EXPR:
4576 tem = fold_convert_loc (loc, arg1_type, arg1);
4577 return pedantic_non_lvalue_loc (loc,
4578 fold_convert_loc (loc, type,
4579 negate_expr (tem)));
4580 case NE_EXPR:
4581 case LTGT_EXPR:
4582 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4583 case UNGE_EXPR:
4584 case UNGT_EXPR:
4585 if (flag_trapping_math)
4586 break;
4587 /* Fall through. */
4588 case GE_EXPR:
4589 case GT_EXPR:
4590 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4591 arg1 = fold_convert_loc (loc, signed_type_for
4592 (TREE_TYPE (arg1)), arg1);
4593 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4594 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4595 case UNLE_EXPR:
4596 case UNLT_EXPR:
4597 if (flag_trapping_math)
4598 break;
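/* Fall through. */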
4599 case LE_EXPR:
4600 case LT_EXPR:
4601 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4602 arg1 = fold_convert_loc (loc, signed_type_for
4603 (TREE_TYPE (arg1)), arg1);
4604 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4605 return negate_expr (fold_convert_loc (loc, type, tem));
4606 default:
4607 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4608 break;
4611 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4612 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4613 both transformations are correct when A is NaN: A != 0
4614 is then true, and A == 0 is false. */
4616 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4617 && integer_zerop (arg01) && integer_zerop (arg2))
4619 if (comp_code == NE_EXPR)
4620 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4621 else if (comp_code == EQ_EXPR)
4622 return build_int_cst (type, 0);
4625 /* Try some transformations of A op B ? A : B.
4627 A == B? A : B same as B
4628 A != B? A : B same as A
4629 A >= B? A : B same as max (A, B)
4630 A > B? A : B same as max (B, A)
4631 A <= B? A : B same as min (A, B)
4632 A < B? A : B same as min (B, A)
4634 As above, these transformations don't work in the presence
4635 of signed zeros. For example, if A and B are zeros of
4636 opposite sign, the first two transformations will change
4637 the sign of the result. In the last four, the original
4638 expressions give different results for (A=+0, B=-0) and
4639 (A=-0, B=+0), but the transformed expressions do not.
4641 The first two transformations are correct if either A or B
4642 is a NaN. In the first transformation, the condition will
4643 be false, and B will indeed be chosen. In the case of the
4644 second transformation, the condition A != B will be true,
4645 and A will be chosen.
4647 The conversions to max() and min() are not correct if B is
4648 a number and A is not. The conditions in the original
4649 expressions will be false, so all four give B. The min()
4650 and max() versions would give a NaN instead. */
4651 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4652 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4653 /* Avoid these transformations if the COND_EXPR may be used
4654 as an lvalue in the C++ front-end. PR c++/19199. */
4655 && (in_gimple_form
4656 || (strcmp (lang_hooks.name, "GNU C++") != 0
4657 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4658 || ! maybe_lvalue_p (arg1)
4659 || ! maybe_lvalue_p (arg2)))
4661 tree comp_op0 = arg00;
4662 tree comp_op1 = arg01;
4663 tree comp_type = TREE_TYPE (comp_op0);
4665 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4666 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4668 comp_type = type;
4669 comp_op0 = arg1;
4670 comp_op1 = arg2;
4673 switch (comp_code)
4675 case EQ_EXPR:
4676 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4677 case NE_EXPR:
4678 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4679 case LE_EXPR:
4680 case LT_EXPR:
4681 case UNLE_EXPR:
4682 case UNLT_EXPR:
4683 /* In C++ a ?: expression can be an lvalue, so put the
4684 operand which will be used if they are equal first
4685 so that we can convert this back to the
4686 corresponding COND_EXPR. */
4687 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4689 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4690 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4691 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4692 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4693 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4694 comp_op1, comp_op0);
4695 return pedantic_non_lvalue_loc (loc,
4696 fold_convert_loc (loc, type, tem));
4698 break;
4699 case GE_EXPR:
4700 case GT_EXPR:
4701 case UNGE_EXPR:
4702 case UNGT_EXPR:
4703 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4705 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4706 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4707 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4708 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4709 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4710 comp_op1, comp_op0);
4711 return pedantic_non_lvalue_loc (loc,
4712 fold_convert_loc (loc, type, tem));
4714 break;
4715 case UNEQ_EXPR:
4716 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4717 return pedantic_non_lvalue_loc (loc,
4718 fold_convert_loc (loc, type, arg2));
4719 break;
4720 case LTGT_EXPR:
4721 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4722 return pedantic_non_lvalue_loc (loc,
4723 fold_convert_loc (loc, type, arg1));
4724 break;
4725 default:
4726 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4727 break;
4731 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4732 we might still be able to simplify this. For example,
4733 if C1 is one less or one more than C2, this might have started
4734 out as a MIN or MAX and been transformed by this function.
4735 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4737 if (INTEGRAL_TYPE_P (type)
4738 && TREE_CODE (arg01) == INTEGER_CST
4739 && TREE_CODE (arg2) == INTEGER_CST)
4740 switch (comp_code)
4742 case EQ_EXPR:
4743 if (TREE_CODE (arg1) == INTEGER_CST)
4744 break;
4745 /* We can replace A with C1 in this case. */
4746 arg1 = fold_convert_loc (loc, type, arg01);
4747 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4749 case LT_EXPR:
4750 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4751 MIN_EXPR, to preserve the signedness of the comparison. */
4752 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4753 OEP_ONLY_CONST)
4754 && operand_equal_p (arg01,
4755 const_binop (PLUS_EXPR, arg2,
4756 build_int_cst (type, 1)),
4757 OEP_ONLY_CONST))
4759 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4760 fold_convert_loc (loc, TREE_TYPE (arg00),
4761 arg2));
4762 return pedantic_non_lvalue_loc (loc,
4763 fold_convert_loc (loc, type, tem));
4765 break;
4767 case LE_EXPR:
4768 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4769 as above. */
4770 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4771 OEP_ONLY_CONST)
4772 && operand_equal_p (arg01,
4773 const_binop (MINUS_EXPR, arg2,
4774 build_int_cst (type, 1)),
4775 OEP_ONLY_CONST))
4777 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4778 fold_convert_loc (loc, TREE_TYPE (arg00),
4779 arg2));
4780 return pedantic_non_lvalue_loc (loc,
4781 fold_convert_loc (loc, type, tem));
4783 break;
4785 case GT_EXPR:
4786 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4787 MAX_EXPR, to preserve the signedness of the comparison. */
4788 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4789 OEP_ONLY_CONST)
4790 && operand_equal_p (arg01,
4791 const_binop (MINUS_EXPR, arg2,
4792 build_int_cst (type, 1)),
4793 OEP_ONLY_CONST))
4795 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4796 fold_convert_loc (loc, TREE_TYPE (arg00),
4797 arg2));
4798 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4800 break;
4802 case GE_EXPR:
4803 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4804 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4805 OEP_ONLY_CONST)
4806 && operand_equal_p (arg01,
4807 const_binop (PLUS_EXPR, arg2,
4808 build_int_cst (type, 1)),
4809 OEP_ONLY_CONST))
4811 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4812 fold_convert_loc (loc, TREE_TYPE (arg00),
4813 arg2));
4814 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4816 break;
4817 case NE_EXPR:
4818 break;
4819 default:
4820 gcc_unreachable ();
4823 return NULL_TREE;
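/* Concretely (illustrative operands): under -fno-signed-zeros,
   a > 0.0 ? a : -a becomes ABS_EXPR <a> through the GT_EXPR case,
   and when NaNs need not be honored x < y ? x : y becomes
   MIN_EXPR <y, x> through the LT_EXPR case.  */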
4828 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4829 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4830 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4831 false) >= 2)
4832 #endif
4834 /* EXP is some logical combination of boolean tests. See if we can
4835 merge it into some range test. Return the new tree if so. */
4837 static tree
4838 fold_range_test (location_t loc, enum tree_code code, tree type,
4839 tree op0, tree op1)
4841 int or_op = (code == TRUTH_ORIF_EXPR
4842 || code == TRUTH_OR_EXPR);
4843 int in0_p, in1_p, in_p;
4844 tree low0, low1, low, high0, high1, high;
4845 bool strict_overflow_p = false;
4846 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4847 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4848 tree tem;
4849 const char * const warnmsg = G_("assuming signed overflow does not occur "
4850 "when simplifying range test");
4852 /* If this is an OR operation, invert both sides; we will invert
4853 again at the end. */
4854 if (or_op)
4855 in0_p = ! in0_p, in1_p = ! in1_p;
4857 /* If both expressions are the same, if we can merge the ranges, and we
4858 can build the range test, return it or it inverted. If one of the
4859 ranges is always true or always false, consider it to be the same
4860 expression as the other. */
4861 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4862 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4863 in1_p, low1, high1)
4864 && 0 != (tem = (build_range_check (loc, type,
4865 lhs != 0 ? lhs
4866 : rhs != 0 ? rhs : integer_zero_node,
4867 in_p, low, high))))
4869 if (strict_overflow_p)
4870 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4871 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4874 /* On machines where the branch cost is expensive, if this is a
4875 short-circuited branch and the underlying object on both sides
4876 is the same, make a non-short-circuit operation. */
4877 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4878 && lhs != 0 && rhs != 0
4879 && (code == TRUTH_ANDIF_EXPR
4880 || code == TRUTH_ORIF_EXPR)
4881 && operand_equal_p (lhs, rhs, 0))
4883 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4884 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4885 which cases we can't do this. */
4886 if (simple_operand_p (lhs))
4887 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4888 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4889 type, op0, op1);
4891 else if (!lang_hooks.decls.global_bindings_p ()
4892 && !CONTAINS_PLACEHOLDER_P (lhs))
4894 tree common = save_expr (lhs);
4896 if (0 != (lhs = build_range_check (loc, type, common,
4897 or_op ? ! in0_p : in0_p,
4898 low0, high0))
4899 && (0 != (rhs = build_range_check (loc, type, common,
4900 or_op ? ! in1_p : in1_p,
4901 low1, high1))))
4903 if (strict_overflow_p)
4904 fold_overflow_warning (warnmsg,
4905 WARN_STRICT_OVERFLOW_COMPARISON);
4906 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4907 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4908 type, lhs, rhs);
4913 return 0;
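/* Sketch of the or_op inversion (illustrative bounds): for
   x < 2 || x > 5, both sides are first inverted to + [2, -] and
   + [-, 5], merged to + [2, 5], a range check such as
   (unsigned) (x - 2) <= 3 is built, and the final inversion restores
   the OR sense, giving (unsigned) (x - 2) > 3.  */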
4916 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4917 bit value. Arrange things so the extra bits will be set to zero if and
4918 only if C is sign-extended to its full width. If MASK is nonzero,
4919 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4921 static tree
4922 unextend (tree c, int p, int unsignedp, tree mask)
4924 tree type = TREE_TYPE (c);
4925 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4926 tree temp;
4928 if (p == modesize || unsignedp)
4929 return c;
4931 /* We work by getting just the sign bit into the low-order bit, then
4932 into the high-order bit, then sign-extend. We then XOR that value
4933 with C. */
4934 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4935 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4937 /* We must use a signed type in order to get an arithmetic right shift.
4938 However, we must also avoid introducing accidental overflows, so that
4939 a subsequent call to integer_zerop will work. Hence we must
4940 do the type conversion here. At this point, the constant is either
4941 zero or one, and the conversion to a signed type can never overflow.
4942 We could get an overflow if this conversion is done anywhere else. */
4943 if (TYPE_UNSIGNED (type))
4944 temp = fold_convert (signed_type_for (type), temp);
4946 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4947 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4948 if (mask != 0)
4949 temp = const_binop (BIT_AND_EXPR, temp,
4950 fold_convert (TREE_TYPE (c), mask));
4951 /* If necessary, convert the type back to match the type of C. */
4952 if (TYPE_UNSIGNED (type))
4953 temp = fold_convert (type, temp);
4955 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
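/* Numeric sketch with P == 4 in an 8-bit mode (illustrative values):
   for C == 0x0b the sign bit of the field is set, TEMP becomes
   (0x0b >> 3) & 1 == 1, shifted up to 0x80 and arithmetically back
   down to 0xf0, so the result is 0x0b ^ 0xf0 == 0xfb.  Feeding the
   sign-extended 0xfb in instead returns 0x0b, with the extra bits
   zero, which is the "if and only if" promised above.  */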
4958 /* For an expression that has the form
4959 (A && B) || ~B
4960 or
4961 (A || B) && ~B,
4962 we can drop one of the inner expressions and simplify to
4963 A || ~B
4964 or
4965 A && ~B.
4966 LOC is the location of the resulting expression. OP is the inner
4967 logical operation, the left-hand side in the examples above, while CMPOP
4968 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4969 removing a condition that guards another, as in
4970 (A != NULL && A->...) || A == NULL
4971 which we must not transform. If RHS_ONLY is true, only eliminate the
4972 right-most operand of the inner logical operation. */
4974 static tree
4975 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4976 bool rhs_only)
4978 tree type = TREE_TYPE (cmpop);
4979 enum tree_code code = TREE_CODE (cmpop);
4980 enum tree_code truthop_code = TREE_CODE (op);
4981 tree lhs = TREE_OPERAND (op, 0);
4982 tree rhs = TREE_OPERAND (op, 1);
4983 tree orig_lhs = lhs, orig_rhs = rhs;
4984 enum tree_code rhs_code = TREE_CODE (rhs);
4985 enum tree_code lhs_code = TREE_CODE (lhs);
4986 enum tree_code inv_code;
4988 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4989 return NULL_TREE;
4991 if (TREE_CODE_CLASS (code) != tcc_comparison)
4992 return NULL_TREE;
4994 if (rhs_code == truthop_code)
4996 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4997 if (newrhs != NULL_TREE)
4999 rhs = newrhs;
5000 rhs_code = TREE_CODE (rhs);
5003 if (lhs_code == truthop_code && !rhs_only)
5005 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5006 if (newlhs != NULL_TREE)
5008 lhs = newlhs;
5009 lhs_code = TREE_CODE (lhs);
5013 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5014 if (inv_code == rhs_code
5015 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5016 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5017 return lhs;
5018 if (!rhs_only && inv_code == lhs_code
5019 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5020 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5021 return rhs;
5022 if (rhs != orig_rhs || lhs != orig_lhs)
5023 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5024 lhs, rhs);
5025 return NULL_TREE;
5028 /* Find ways of folding logical expressions of LHS and RHS:
5029 Try to merge two comparisons to the same innermost item.
5030 Look for range tests like "ch >= '0' && ch <= '9'".
5031 Look for combinations of simple terms on machines with expensive branches
5032 and evaluate the RHS unconditionally.
5034 For example, if we have p->a == 2 && p->b == 4 and we can make an
5035 object large enough to span both A and B, we can do this with a comparison
5036 against the object ANDed with the a mask.
5038 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5039 operations to do this with one comparison.
5041 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5042 function and the one above.
5044 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5045 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5047 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5048 two operands.
5050 We return the simplified tree or 0 if no optimization is possible. */
5052 static tree
5053 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5054 tree lhs, tree rhs)
5056 /* If this is the "or" of two comparisons, we can do something if
5057 the comparisons are NE_EXPR. If this is the "and", we can do something
5058 if the comparisons are EQ_EXPR. I.e.,
5059 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5061 WANTED_CODE is this operation code. For single bit fields, we can
5062 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5063 comparison for one-bit fields. */
5065 enum tree_code wanted_code;
5066 enum tree_code lcode, rcode;
5067 tree ll_arg, lr_arg, rl_arg, rr_arg;
5068 tree ll_inner, lr_inner, rl_inner, rr_inner;
5069 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5070 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5071 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5072 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5073 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5074 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5075 enum machine_mode lnmode, rnmode;
5076 tree ll_mask, lr_mask, rl_mask, rr_mask;
5077 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5078 tree l_const, r_const;
5079 tree lntype, rntype, result;
5080 HOST_WIDE_INT first_bit, end_bit;
5081 int volatilep;
5083 /* Start by getting the comparison codes. Fail if anything is volatile.
5084 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5085 it were surrounded with a NE_EXPR. */
5087 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5088 return 0;
5090 lcode = TREE_CODE (lhs);
5091 rcode = TREE_CODE (rhs);
5093 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5095 lhs = build2 (NE_EXPR, truth_type, lhs,
5096 build_int_cst (TREE_TYPE (lhs), 0));
5097 lcode = NE_EXPR;
5100 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5102 rhs = build2 (NE_EXPR, truth_type, rhs,
5103 build_int_cst (TREE_TYPE (rhs), 0));
5104 rcode = NE_EXPR;
5107 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5108 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5109 return 0;
5111 ll_arg = TREE_OPERAND (lhs, 0);
5112 lr_arg = TREE_OPERAND (lhs, 1);
5113 rl_arg = TREE_OPERAND (rhs, 0);
5114 rr_arg = TREE_OPERAND (rhs, 1);
5116 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5117 if (simple_operand_p (ll_arg)
5118 && simple_operand_p (lr_arg))
5120 if (operand_equal_p (ll_arg, rl_arg, 0)
5121 && operand_equal_p (lr_arg, rr_arg, 0))
5123 result = combine_comparisons (loc, code, lcode, rcode,
5124 truth_type, ll_arg, lr_arg);
5125 if (result)
5126 return result;
5128 else if (operand_equal_p (ll_arg, rr_arg, 0)
5129 && operand_equal_p (lr_arg, rl_arg, 0))
5131 result = combine_comparisons (loc, code, lcode,
5132 swap_tree_comparison (rcode),
5133 truth_type, ll_arg, lr_arg);
5134 if (result)
5135 return result;
5139 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5140 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5142 /* If the RHS can be evaluated unconditionally and its operands are
5143 simple, it wins to evaluate the RHS unconditionally on machines
5144 with expensive branches. In this case, this isn't a comparison
5145 that can be merged. */
5147 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5148 false) >= 2
5149 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5150 && simple_operand_p (rl_arg)
5151 && simple_operand_p (rr_arg))
5153 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5154 if (code == TRUTH_OR_EXPR
5155 && lcode == NE_EXPR && integer_zerop (lr_arg)
5156 && rcode == NE_EXPR && integer_zerop (rr_arg)
5157 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5158 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5159 return build2_loc (loc, NE_EXPR, truth_type,
5160 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5161 ll_arg, rl_arg),
5162 build_int_cst (TREE_TYPE (ll_arg), 0));
5164 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5165 if (code == TRUTH_AND_EXPR
5166 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5167 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5168 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5169 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5170 return build2_loc (loc, EQ_EXPR, truth_type,
5171 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5172 ll_arg, rl_arg),
5173 build_int_cst (TREE_TYPE (ll_arg), 0));
5176 /* See if the comparisons can be merged. Then get all the parameters for
5177 each side. */
5179 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5180 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5181 return 0;
5183 volatilep = 0;
5184 ll_inner = decode_field_reference (loc, ll_arg,
5185 &ll_bitsize, &ll_bitpos, &ll_mode,
5186 &ll_unsignedp, &volatilep, &ll_mask,
5187 &ll_and_mask);
5188 lr_inner = decode_field_reference (loc, lr_arg,
5189 &lr_bitsize, &lr_bitpos, &lr_mode,
5190 &lr_unsignedp, &volatilep, &lr_mask,
5191 &lr_and_mask);
5192 rl_inner = decode_field_reference (loc, rl_arg,
5193 &rl_bitsize, &rl_bitpos, &rl_mode,
5194 &rl_unsignedp, &volatilep, &rl_mask,
5195 &rl_and_mask);
5196 rr_inner = decode_field_reference (loc, rr_arg,
5197 &rr_bitsize, &rr_bitpos, &rr_mode,
5198 &rr_unsignedp, &volatilep, &rr_mask,
5199 &rr_and_mask);
5201 /* The inner operation on the lhs of each comparison must be the
5202 same if we are to be able to do anything.
5203 Then see if we have constants. If not, the same must be true for
5204 the rhs's. */
5205 if (volatilep || ll_inner == 0 || rl_inner == 0
5206 || ! operand_equal_p (ll_inner, rl_inner, 0))
5207 return 0;
5209 if (TREE_CODE (lr_arg) == INTEGER_CST
5210 && TREE_CODE (rr_arg) == INTEGER_CST)
5211 l_const = lr_arg, r_const = rr_arg;
5212 else if (lr_inner == 0 || rr_inner == 0
5213 || ! operand_equal_p (lr_inner, rr_inner, 0))
5214 return 0;
5215 else
5216 l_const = r_const = 0;
5218 /* If either comparison code is not correct for our logical operation,
5219 fail. However, we can convert a one-bit comparison against zero into
5220 the opposite comparison against that bit being set in the field. */
5222 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5223 if (lcode != wanted_code)
5225 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5227 /* Make the left operand unsigned, since we are only interested
5228 in the value of one bit. Otherwise we are doing the wrong
5229 thing below. */
5230 ll_unsignedp = 1;
5231 l_const = ll_mask;
5233 else
5234 return 0;
5237 /* This is analogous to the code for l_const above. */
5238 if (rcode != wanted_code)
5240 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5242 rl_unsignedp = 1;
5243 r_const = rl_mask;
5245 else
5246 return 0;
5249 /* See if we can find a mode that contains both fields being compared on
5250 the left. If we can't, fail. Otherwise, update all constants and masks
5251 to be relative to a field of that size. */
5252 first_bit = MIN (ll_bitpos, rl_bitpos);
5253 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5254 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5255 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5256 volatilep);
5257 if (lnmode == VOIDmode)
5258 return 0;
5260 lnbitsize = GET_MODE_BITSIZE (lnmode);
5261 lnbitpos = first_bit & ~ (lnbitsize - 1);
5262 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5263 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5265 if (BYTES_BIG_ENDIAN)
5267 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5268 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5271 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5272 size_int (xll_bitpos));
5273 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5274 size_int (xrl_bitpos));
5276 if (l_const)
5278 l_const = fold_convert_loc (loc, lntype, l_const);
5279 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5280 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5281 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5282 fold_build1_loc (loc, BIT_NOT_EXPR,
5283 lntype, ll_mask))))
5285 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5287 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5290 if (r_const)
5292 r_const = fold_convert_loc (loc, lntype, r_const);
5293 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5294 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5295 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5296 fold_build1_loc (loc, BIT_NOT_EXPR,
5297 lntype, rl_mask))))
5299 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5301 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5305 /* If the right sides are not constant, do the same for it. Also,
5306 disallow this optimization if a size or signedness mismatch occurs
5307 between the left and right sides. */
5308 if (l_const == 0)
5310 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5311 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5312 /* Make sure the two fields on the right
5313 correspond to the left without being swapped. */
5314 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5315 return 0;
5317 first_bit = MIN (lr_bitpos, rr_bitpos);
5318 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5319 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5320 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5321 volatilep);
5322 if (rnmode == VOIDmode)
5323 return 0;
5325 rnbitsize = GET_MODE_BITSIZE (rnmode);
5326 rnbitpos = first_bit & ~ (rnbitsize - 1);
5327 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5328 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5330 if (BYTES_BIG_ENDIAN)
5332 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5333 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5336 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5337 rntype, lr_mask),
5338 size_int (xlr_bitpos));
5339 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5340 rntype, rr_mask),
5341 size_int (xrr_bitpos));
5343 /* Make a mask that corresponds to both fields being compared.
5344 Do this for both items being compared. If the operands are the
5345 same size and the bits being compared are in the same position
5346 then we can do this by masking both and comparing the masked
5347 results. */
5348 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5349 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5350 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5352 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5353 ll_unsignedp || rl_unsignedp);
5354 if (! all_ones_mask_p (ll_mask, lnbitsize))
5355 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5357 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5358 lr_unsignedp || rr_unsignedp);
5359 if (! all_ones_mask_p (lr_mask, rnbitsize))
5360 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5362 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5365 /* There is still another way we can do something: If both pairs of
5366 fields being compared are adjacent, we may be able to make a wider
5367 field containing them both.
5369 Note that we still must mask the lhs/rhs expressions. Furthermore,
5370 the mask must be shifted to account for the shift done by
5371 make_bit_field_ref. */
5372 if ((ll_bitsize + ll_bitpos == rl_bitpos
5373 && lr_bitsize + lr_bitpos == rr_bitpos)
5374 || (ll_bitpos == rl_bitpos + rl_bitsize
5375 && lr_bitpos == rr_bitpos + rr_bitsize))
5377 tree type;
5379 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5380 ll_bitsize + rl_bitsize,
5381 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5382 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5383 lr_bitsize + rr_bitsize,
5384 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5386 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5387 size_int (MIN (xll_bitpos, xrl_bitpos)));
5388 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5389 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5391 /* Convert to the smaller type before masking out unwanted bits. */
5392 type = lntype;
5393 if (lntype != rntype)
5395 if (lnbitsize > rnbitsize)
5397 lhs = fold_convert_loc (loc, rntype, lhs);
5398 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5399 type = rntype;
5401 else if (lnbitsize < rnbitsize)
5403 rhs = fold_convert_loc (loc, lntype, rhs);
5404 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5405 type = lntype;
5409 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5410 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5412 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5413 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5415 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5418 return 0;
5421 /* Handle the case of comparisons with constants. If there is something in
5422 common between the masks, those bits of the constants must be the same.
5423 If not, the condition is always false. Test for this to avoid generating
5424 incorrect code below. */
5425 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5426 if (! integer_zerop (result)
5427 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5428 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5430 if (wanted_code == NE_EXPR)
5432 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5433 return constant_boolean_node (true, truth_type);
5435 else
5437 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5438 return constant_boolean_node (false, truth_type);
5442 /* Construct the expression we will return. First get the component
5443 reference we will make. Unless the mask is all ones for the width of
5444 that field, perform the mask operation. Then compare with the
5445 merged constant. */
5446 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5447 ll_unsignedp || rl_unsignedp);
5449 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5450 if (! all_ones_mask_p (ll_mask, lnbitsize))
5451 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5453 return build2_loc (loc, wanted_code, truth_type, result,
5454 const_binop (BIT_IOR_EXPR, l_const, r_const));
5457 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5458 constant. */
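/* For instance (these are the cases handled below): MAX (X, 0) == 0 folds
   to X <= 0, MAX (X, 0) == -1 folds to false, MIN (X, 0) > 5 folds to
   false, and MIN (X, 0) > -1 folds to X > -1.  */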
5460 static tree
5461 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5462 tree op0, tree op1)
5464 tree arg0 = op0;
5465 enum tree_code op_code;
5466 tree comp_const;
5467 tree minmax_const;
5468 int consts_equal, consts_lt;
5469 tree inner;
5471 STRIP_SIGN_NOPS (arg0);
5473 op_code = TREE_CODE (arg0);
5474 minmax_const = TREE_OPERAND (arg0, 1);
5475 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5476 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5477 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5478 inner = TREE_OPERAND (arg0, 0);
5480 /* If something does not permit us to optimize, return the original tree. */
5481 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5482 || TREE_CODE (comp_const) != INTEGER_CST
5483 || TREE_OVERFLOW (comp_const)
5484 || TREE_CODE (minmax_const) != INTEGER_CST
5485 || TREE_OVERFLOW (minmax_const))
5486 return NULL_TREE;
5488 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5489 and GT_EXPR, doing the rest with recursive calls using logical
5490 simplifications. */
5491 switch (code)
5493 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5495 tree tem
5496 = optimize_minmax_comparison (loc,
5497 invert_tree_comparison (code, false),
5498 type, op0, op1);
5499 if (tem)
5500 return invert_truthvalue_loc (loc, tem);
5501 return NULL_TREE;
5504 case GE_EXPR:
5505 return
5506 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5507 optimize_minmax_comparison
5508 (loc, EQ_EXPR, type, arg0, comp_const),
5509 optimize_minmax_comparison
5510 (loc, GT_EXPR, type, arg0, comp_const));
5512 case EQ_EXPR:
5513 if (op_code == MAX_EXPR && consts_equal)
5514 /* MAX (X, 0) == 0 -> X <= 0 */
5515 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5517 else if (op_code == MAX_EXPR && consts_lt)
5518 /* MAX (X, 0) == 5 -> X == 5 */
5519 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5521 else if (op_code == MAX_EXPR)
5522 /* MAX (X, 0) == -1 -> false */
5523 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5525 else if (consts_equal)
5526 /* MIN (X, 0) == 0 -> X >= 0 */
5527 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5529 else if (consts_lt)
5530 /* MIN (X, 0) == 5 -> false */
5531 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5533 else
5534 /* MIN (X, 0) == -1 -> X == -1 */
5535 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5537 case GT_EXPR:
5538 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5539 /* MAX (X, 0) > 0 -> X > 0
5540 MAX (X, 0) > 5 -> X > 5 */
5541 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5543 else if (op_code == MAX_EXPR)
5544 /* MAX (X, 0) > -1 -> true */
5545 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5547 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5548 /* MIN (X, 0) > 0 -> false
5549 MIN (X, 0) > 5 -> false */
5550 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5552 else
5553 /* MIN (X, 0) > -1 -> X > -1 */
5554 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5556 default:
5557 return NULL_TREE;
5561 /* T is an integer expression that is being multiplied, divided, or taken a
5562 modulus (CODE says which and what kind of divide or modulus) by a
5563 constant C. See if we can eliminate that operation by folding it with
5564 other operations already in T. WIDE_TYPE, if non-null, is a type that
5565 should be used for the computation if wider than our type.
5567 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5568 (X * 2) + (Y * 4). We must, however, be assured that either the original
5569 expression would not overflow or that overflow is undefined for the type
5570 in the language in question.
5572 If we return a non-null expression, it is an equivalent form of the
5573 original computation, but need not be in the original type.
5575 We set *STRICT_OVERFLOW_P to true if the return value depends on
5576 signed overflow being undefined. Otherwise we do not change
5577 *STRICT_OVERFLOW_P. */
5579 static tree
5580 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5581 bool *strict_overflow_p)
5583 /* To avoid exponential search depth, refuse to allow recursion past
5584 three levels. Beyond that (1) it's highly unlikely that we'll find
5585 something interesting and (2) we've probably processed it before
5586 when we built the inner expression. */
5588 static int depth;
5589 tree ret;
5591 if (depth > 3)
5592 return NULL;
5594 depth++;
5595 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5596 depth--;
5598 return ret;
5601 static tree
5602 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5603 bool *strict_overflow_p)
5605 tree type = TREE_TYPE (t);
5606 enum tree_code tcode = TREE_CODE (t);
5607 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5608 > GET_MODE_SIZE (TYPE_MODE (type)))
5609 ? wide_type : type);
5610 tree t1, t2;
5611 int same_p = tcode == code;
5612 tree op0 = NULL_TREE, op1 = NULL_TREE;
5613 bool sub_strict_overflow_p;
5615 /* Don't deal with constants of zero here; they confuse the code below. */
5616 if (integer_zerop (c))
5617 return NULL_TREE;
5619 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5620 op0 = TREE_OPERAND (t, 0);
5622 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5623 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5625 /* Note that we need not handle conditional operations here since fold
5626 already handles those cases. So just do arithmetic here. */
5627 switch (tcode)
5629 case INTEGER_CST:
5630 /* For a constant, we can always simplify if we are a multiply
5631 or (for divide and modulus) if it is a multiple of our constant. */
5632 if (code == MULT_EXPR
5633 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5634 return const_binop (code, fold_convert (ctype, t),
5635 fold_convert (ctype, c));
5636 break;
5638 CASE_CONVERT: case NON_LVALUE_EXPR:
5639 /* If op0 is an expression ... */
5640 if ((COMPARISON_CLASS_P (op0)
5641 || UNARY_CLASS_P (op0)
5642 || BINARY_CLASS_P (op0)
5643 || VL_EXP_CLASS_P (op0)
5644 || EXPRESSION_CLASS_P (op0))
5645 /* ... and has wrapping overflow, and its type is smaller
5646 than ctype, then we cannot pass through as widening. */
5647 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5648 && (TYPE_PRECISION (ctype)
5649 > TYPE_PRECISION (TREE_TYPE (op0))))
5650 /* ... or this is a truncation (t is narrower than op0),
5651 then we cannot pass through this narrowing. */
5652 || (TYPE_PRECISION (type)
5653 < TYPE_PRECISION (TREE_TYPE (op0)))
5654 /* ... or signedness changes for division or modulus,
5655 then we cannot pass through this conversion. */
5656 || (code != MULT_EXPR
5657 && (TYPE_UNSIGNED (ctype)
5658 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5659 /* ... or has undefined overflow while the converted to
5660 type has not, we cannot do the operation in the inner type
5661 as that would introduce undefined overflow. */
5662 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5663 && !TYPE_OVERFLOW_UNDEFINED (type))))
5664 break;
5666 /* Pass the constant down and see if we can make a simplification. If
5667 we can, replace this expression with the inner simplification for
5668 possible later conversion to our or some other type. */
5669 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5670 && TREE_CODE (t2) == INTEGER_CST
5671 && !TREE_OVERFLOW (t2)
5672 && (0 != (t1 = extract_muldiv (op0, t2, code,
5673 code == MULT_EXPR
5674 ? ctype : NULL_TREE,
5675 strict_overflow_p))))
5676 return t1;
5677 break;
5679 case ABS_EXPR:
5680 /* If widening the type changes it from signed to unsigned, then we
5681 must avoid building ABS_EXPR itself as unsigned. */
5682 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5684 tree cstype = (*signed_type_for) (ctype);
5685 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5686 != 0)
5688 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5689 return fold_convert (ctype, t1);
5691 break;
5693 /* If the constant is negative, we cannot simplify this. */
5694 if (tree_int_cst_sgn (c) == -1)
5695 break;
5696 /* FALLTHROUGH */
5697 case NEGATE_EXPR:
5698 /* For division and modulus, type can't be unsigned, as e.g.
5699 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5700 For signed types, even with wrapping overflow, this is fine. */
5701 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5702 break;
5703 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5704 != 0)
5705 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5706 break;
5708 case MIN_EXPR: case MAX_EXPR:
5709 /* If widening the type changes the signedness, then we can't perform
5710 this optimization as that changes the result. */
5711 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5712 break;
5714 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5715 sub_strict_overflow_p = false;
5716 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5717 &sub_strict_overflow_p)) != 0
5718 && (t2 = extract_muldiv (op1, c, code, wide_type,
5719 &sub_strict_overflow_p)) != 0)
5721 if (tree_int_cst_sgn (c) < 0)
5722 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5723 if (sub_strict_overflow_p)
5724 *strict_overflow_p = true;
5725 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5726 fold_convert (ctype, t2));
5728 break;
5730 case LSHIFT_EXPR: case RSHIFT_EXPR:
5731 /* If the second operand is constant, this is a multiplication
5732 or floor division, by a power of two, so we can treat it that
5733 way unless the multiplier or divisor overflows. Signed
5734 left-shift overflow is implementation-defined rather than
5735 undefined in C90, so do not convert signed left shift into
5736 multiplication. */
5737 if (TREE_CODE (op1) == INTEGER_CST
5738 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5739 /* const_binop may not detect overflow correctly,
5740 so check for it explicitly here. */
5741 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5742 && TREE_INT_CST_HIGH (op1) == 0
5743 && 0 != (t1 = fold_convert (ctype,
5744 const_binop (LSHIFT_EXPR,
5745 size_one_node,
5746 op1)))
5747 && !TREE_OVERFLOW (t1))
5748 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5749 ? MULT_EXPR : FLOOR_DIV_EXPR,
5750 ctype,
5751 fold_convert (ctype, op0),
5752 t1),
5753 c, code, wide_type, strict_overflow_p);
5754 break;
5756 case PLUS_EXPR: case MINUS_EXPR:
5757 /* See if we can eliminate the operation on both sides. If we can, we
5758 can return a new PLUS or MINUS. If we can't, the only remaining
5759 cases where we can do anything are if the second operand is a
5760 constant. */
5761 sub_strict_overflow_p = false;
5762 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5763 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5764 if (t1 != 0 && t2 != 0
5765 && (code == MULT_EXPR
5766 /* If not multiplication, we can only do this if both operands
5767 are divisible by c. */
5768 || (multiple_of_p (ctype, op0, c)
5769 && multiple_of_p (ctype, op1, c))))
5771 if (sub_strict_overflow_p)
5772 *strict_overflow_p = true;
5773 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5774 fold_convert (ctype, t2));
5777 /* If this was a subtraction, negate OP1 and set it to be an addition.
5778 This simplifies the logic below. */
5779 if (tcode == MINUS_EXPR)
5781 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5782 /* If OP1 was not easily negatable, the constant may be OP0. */
5783 if (TREE_CODE (op0) == INTEGER_CST)
5785 tree tem = op0;
5786 op0 = op1;
5787 op1 = tem;
5788 tem = t1;
5789 t1 = t2;
5790 t2 = tem;
5794 if (TREE_CODE (op1) != INTEGER_CST)
5795 break;
5797 /* If either OP1 or C is negative, this optimization is not safe for
5798 some of the division and remainder types while for others we need
5799 to change the code. */
5800 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5802 if (code == CEIL_DIV_EXPR)
5803 code = FLOOR_DIV_EXPR;
5804 else if (code == FLOOR_DIV_EXPR)
5805 code = CEIL_DIV_EXPR;
5806 else if (code != MULT_EXPR
5807 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5808 break;
5811 /* If it's a multiply or a division/modulus operation of a multiple
5812 of our constant, do the operation and verify it doesn't overflow. */
5813 if (code == MULT_EXPR
5814 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5816 op1 = const_binop (code, fold_convert (ctype, op1),
5817 fold_convert (ctype, c));
5818 /* We allow the constant to overflow with wrapping semantics. */
5819 if (op1 == 0
5820 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5821 break;
5823 else
5824 break;
5826 /* If we have an unsigned type, we cannot widen the operation since it
5827 will change the result if the original computation overflowed. */
5828 if (TYPE_UNSIGNED (ctype) && ctype != type)
5829 break;
5831 /* If we were able to eliminate our operation from the first side,
5832 apply our operation to the second side and reform the PLUS. */
5833 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5834 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5836 /* The last case is if we are a multiply. In that case, we can
5837 apply the distributive law to commute the multiply and addition
5838 if the multiplication of the constants doesn't overflow. */
5839 if (code == MULT_EXPR)
5840 return fold_build2 (tcode, ctype,
5841 fold_build2 (code, ctype,
5842 fold_convert (ctype, op0),
5843 fold_convert (ctype, c)),
5844 op1);
5846 break;
5848 case MULT_EXPR:
5849 /* We have a special case here if we are doing something like
5850 (C * 8) % 4 since we know that's zero. */
5851 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5852 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5853 /* If the multiplication can overflow we cannot optimize this. */
5854 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5855 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5856 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5858 *strict_overflow_p = true;
5859 return omit_one_operand (type, integer_zero_node, op0);
5862 /* ... fall through ... */
5864 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5865 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5866 /* If we can extract our operation from the LHS, do so and return a
5867 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5868 do something only if the second operand is a constant. */
5869 if (same_p
5870 && (t1 = extract_muldiv (op0, c, code, wide_type,
5871 strict_overflow_p)) != 0)
5872 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5873 fold_convert (ctype, op1));
5874 else if (tcode == MULT_EXPR && code == MULT_EXPR
5875 && (t1 = extract_muldiv (op1, c, code, wide_type,
5876 strict_overflow_p)) != 0)
5877 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5878 fold_convert (ctype, t1));
5879 else if (TREE_CODE (op1) != INTEGER_CST)
5880 return 0;
5882 /* If these are the same operation types, we can associate them
5883 assuming no overflow. */
5884 if (tcode == code)
5886 double_int mul;
5887 bool overflow_p;
5888 unsigned prec = TYPE_PRECISION (ctype);
5889 bool uns = TYPE_UNSIGNED (ctype);
5890 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5891 double_int dic = tree_to_double_int (c).ext (prec, uns);
5892 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5893 overflow_p = ((!uns && overflow_p)
5894 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5895 if (!double_int_fits_to_tree_p (ctype, mul)
5896 && ((uns && tcode != MULT_EXPR) || !uns))
5897 overflow_p = 1;
5898 if (!overflow_p)
5899 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5900 double_int_to_tree (ctype, mul));
5903 /* If these operations "cancel" each other, we have the main
5904 optimizations of this pass, which occur when either constant is a
5905 multiple of the other, in which case we replace this with either an
5906 operation of CODE or TCODE.
5908 If we have an unsigned type, we cannot do this since it will change
5909 the result if the original computation overflowed. */
5910 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5911 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5912 || (tcode == MULT_EXPR
5913 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5914 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5915 && code != MULT_EXPR)))
5917 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5919 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5920 *strict_overflow_p = true;
5921 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5922 fold_convert (ctype,
5923 const_binop (TRUNC_DIV_EXPR,
5924 op1, c)));
5926 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5928 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5929 *strict_overflow_p = true;
5930 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5931 fold_convert (ctype,
5932 const_binop (TRUNC_DIV_EXPR,
5933 c, op1)));
5936 break;
5938 default:
5939 break;
5942 return 0;
5945 /* Return a node which has the indicated constant VALUE (either 0 or
5946 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5947 and is of the indicated TYPE. */
5949 tree
5950 constant_boolean_node (bool value, tree type)
5952 if (type == integer_type_node)
5953 return value ? integer_one_node : integer_zero_node;
5954 else if (type == boolean_type_node)
5955 return value ? boolean_true_node : boolean_false_node;
5956 else if (TREE_CODE (type) == VECTOR_TYPE)
5957 return build_vector_from_val (type,
5958 build_int_cst (TREE_TYPE (type),
5959 value ? -1 : 0));
5960 else
5961 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5965 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5966 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5967 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5968 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5969 COND is the first argument to CODE; otherwise (as in the example
5970 given here), it is the second argument. TYPE is the type of the
5971 original expression. Return NULL_TREE if no simplification is
5972 possible. */
5974 static tree
5975 fold_binary_op_with_conditional_arg (location_t loc,
5976 enum tree_code code,
5977 tree type, tree op0, tree op1,
5978 tree cond, tree arg, int cond_first_p)
5980 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5981 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5982 tree test, true_value, false_value;
5983 tree lhs = NULL_TREE;
5984 tree rhs = NULL_TREE;
5985 enum tree_code cond_code = COND_EXPR;
5987 if (TREE_CODE (cond) == COND_EXPR
5988 || TREE_CODE (cond) == VEC_COND_EXPR)
5990 test = TREE_OPERAND (cond, 0);
5991 true_value = TREE_OPERAND (cond, 1);
5992 false_value = TREE_OPERAND (cond, 2);
5993 /* If this operand throws an exception, then it does not make
5994 sense to try to perform a logical or arithmetic operation
5995 involving it. */
5996 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5997 lhs = true_value;
5998 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5999 rhs = false_value;
6001 else
6003 tree testtype = TREE_TYPE (cond);
6004 test = cond;
6005 true_value = constant_boolean_node (true, testtype);
6006 false_value = constant_boolean_node (false, testtype);
6009 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6010 cond_code = VEC_COND_EXPR;
6012 /* This transformation is only worthwhile if we don't have to wrap ARG
6013 in a SAVE_EXPR and the operation can be simplified without recursing
6014 on at least one of the branches once it's pushed inside the COND_EXPR. */
6015 if (!TREE_CONSTANT (arg)
6016 && (TREE_SIDE_EFFECTS (arg)
6017 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6018 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6019 return NULL_TREE;
6021 arg = fold_convert_loc (loc, arg_type, arg);
6022 if (lhs == 0)
6024 true_value = fold_convert_loc (loc, cond_type, true_value);
6025 if (cond_first_p)
6026 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6027 else
6028 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6030 if (rhs == 0)
6032 false_value = fold_convert_loc (loc, cond_type, false_value);
6033 if (cond_first_p)
6034 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6035 else
6036 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6039 /* Check that we have simplified at least one of the branches. */
6040 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6041 return NULL_TREE;
6043 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6047 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6049 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6050 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6051 ADDEND is the same as X.
6053 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6054 and finite. The problematic cases are when X is zero, and its mode
6055 has signed zeros. In the case of rounding towards -infinity,
6056 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6057 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6059 bool
6060 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6062 if (!real_zerop (addend))
6063 return false;
6065 /* Don't allow the fold with -fsignaling-nans. */
6066 if (HONOR_SNANS (TYPE_MODE (type)))
6067 return false;
6069 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6070 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6071 return true;
6073 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6074 if (TREE_CODE (addend) == REAL_CST
6075 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6076 negate = !negate;
6078 /* The mode has signed zeros, and we have to honor their sign.
6079 In this situation, there is only one case we can return true for.
6080 X - 0 is the same as X unless rounding towards -infinity is
6081 supported. */
6082 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6085 /* Subroutine of fold() that checks comparisons of built-in math
6086 functions against real constants.
6088 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6089 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6090 is the type of the result and ARG0 and ARG1 are the operands of the
6091 comparison. ARG1 must be a TREE_REAL_CST.
6093 The function returns the constant folded tree if a simplification
6094 can be made, and NULL_TREE otherwise. */
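/* Only comparisons against the sqrt family of built-ins are handled here.
   For example, sqrt (x) > c folds to x > c*c when c is nonnegative and
   c*c is finite, and sqrt (x) < c folds to false when c is negative.  */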
6096 static tree
6097 fold_mathfn_compare (location_t loc,
6098 enum built_in_function fcode, enum tree_code code,
6099 tree type, tree arg0, tree arg1)
6101 REAL_VALUE_TYPE c;
6103 if (BUILTIN_SQRT_P (fcode))
6105 tree arg = CALL_EXPR_ARG (arg0, 0);
6106 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6108 c = TREE_REAL_CST (arg1);
6109 if (REAL_VALUE_NEGATIVE (c))
6111 /* sqrt(x) < y is always false, if y is negative. */
6112 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6113 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6115 /* sqrt(x) > y is always true, if y is negative and we
6116 don't care about NaNs, i.e. negative values of x. */
6117 if (code == NE_EXPR || !HONOR_NANS (mode))
6118 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6120 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6121 return fold_build2_loc (loc, GE_EXPR, type, arg,
6122 build_real (TREE_TYPE (arg), dconst0));
6124 else if (code == GT_EXPR || code == GE_EXPR)
6126 REAL_VALUE_TYPE c2;
6128 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6129 real_convert (&c2, mode, &c2);
6131 if (REAL_VALUE_ISINF (c2))
6133 /* sqrt(x) > y is x == +Inf, when y is very large. */
6134 if (HONOR_INFINITIES (mode))
6135 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6136 build_real (TREE_TYPE (arg), c2));
6138 /* sqrt(x) > y is always false, when y is very large
6139 and we don't care about infinities. */
6140 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6143 /* sqrt(x) > c is the same as x > c*c. */
6144 return fold_build2_loc (loc, code, type, arg,
6145 build_real (TREE_TYPE (arg), c2));
6147 else if (code == LT_EXPR || code == LE_EXPR)
6149 REAL_VALUE_TYPE c2;
6151 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6152 real_convert (&c2, mode, &c2);
6154 if (REAL_VALUE_ISINF (c2))
6156 /* sqrt(x) < y is always true, when y is a very large
6157 value and we don't care about NaNs or Infinities. */
6158 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6159 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6161 /* sqrt(x) < y is x != +Inf when y is very large and we
6162 don't care about NaNs. */
6163 if (! HONOR_NANS (mode))
6164 return fold_build2_loc (loc, NE_EXPR, type, arg,
6165 build_real (TREE_TYPE (arg), c2));
6167 /* sqrt(x) < y is x >= 0 when y is very large and we
6168 don't care about Infinities. */
6169 if (! HONOR_INFINITIES (mode))
6170 return fold_build2_loc (loc, GE_EXPR, type, arg,
6171 build_real (TREE_TYPE (arg), dconst0));
6173 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6174 arg = save_expr (arg);
6175 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6176 fold_build2_loc (loc, GE_EXPR, type, arg,
6177 build_real (TREE_TYPE (arg),
6178 dconst0)),
6179 fold_build2_loc (loc, NE_EXPR, type, arg,
6180 build_real (TREE_TYPE (arg),
6181 c2)));
6184 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6185 if (! HONOR_NANS (mode))
6186 return fold_build2_loc (loc, code, type, arg,
6187 build_real (TREE_TYPE (arg), c2));
6189 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6190 arg = save_expr (arg);
6191 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6192 fold_build2_loc (loc, GE_EXPR, type, arg,
6193 build_real (TREE_TYPE (arg),
6194 dconst0)),
6195 fold_build2_loc (loc, code, type, arg,
6196 build_real (TREE_TYPE (arg),
6197 c2)));
6201 return NULL_TREE;
6204 /* Subroutine of fold() that optimizes comparisons against Infinities,
6205 either +Inf or -Inf.
6207 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6208 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6209 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6211 The function returns the constant folded tree if a simplification
6212 can be made, and NULL_TREE otherwise. */
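/* For example, x >= +Inf is rewritten as x > DBL_MAX (for type double),
   x < +Inf as x <= DBL_MAX, and x > +Inf folds to false whenever
   signaling NaNs need not be honored.  */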
6214 static tree
6215 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6216 tree arg0, tree arg1)
6218 enum machine_mode mode;
6219 REAL_VALUE_TYPE max;
6220 tree temp;
6221 bool neg;
6223 mode = TYPE_MODE (TREE_TYPE (arg0));
6225 /* For negative infinity swap the sense of the comparison. */
6226 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6227 if (neg)
6228 code = swap_tree_comparison (code);
6230 switch (code)
6232 case GT_EXPR:
6233 /* x > +Inf is always false, if we ignore sNaNs. */
6234 if (HONOR_SNANS (mode))
6235 return NULL_TREE;
6236 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6238 case LE_EXPR:
6239 /* x <= +Inf is always true, if we don't care about NaNs. */
6240 if (! HONOR_NANS (mode))
6241 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6243 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6244 arg0 = save_expr (arg0);
6245 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6247 case EQ_EXPR:
6248 case GE_EXPR:
6249 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6250 real_maxval (&max, neg, mode);
6251 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6252 arg0, build_real (TREE_TYPE (arg0), max));
6254 case LT_EXPR:
6255 /* x < +Inf is always equal to x <= DBL_MAX. */
6256 real_maxval (&max, neg, mode);
6257 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6258 arg0, build_real (TREE_TYPE (arg0), max));
6260 case NE_EXPR:
6261 /* x != +Inf is always equal to !(x > DBL_MAX). */
6262 real_maxval (&max, neg, mode);
6263 if (! HONOR_NANS (mode))
6264 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6265 arg0, build_real (TREE_TYPE (arg0), max));
6267 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6268 arg0, build_real (TREE_TYPE (arg0), max));
6269 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6271 default:
6272 break;
6275 return NULL_TREE;
6278 /* Subroutine of fold() that optimizes comparisons of a division by
6279 a nonzero integer constant against an integer constant, i.e.
6280 X/C1 op C2.
6282 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6283 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6284 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6286 The function returns the constant folded tree if a simplification
6287 can be made, and NULL_TREE otherwise. */
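/* For example, for unsigned X the comparison X / 4 == 2 becomes the range
   check 8 <= X && X <= 11, and X / 4 != 2 becomes the complementary range
   check.  */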
6289 static tree
6290 fold_div_compare (location_t loc,
6291 enum tree_code code, tree type, tree arg0, tree arg1)
6293 tree prod, tmp, hi, lo;
6294 tree arg00 = TREE_OPERAND (arg0, 0);
6295 tree arg01 = TREE_OPERAND (arg0, 1);
6296 double_int val;
6297 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6298 bool neg_overflow;
6299 bool overflow;
6301 /* We have to do this the hard way to detect unsigned overflow.
6302 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6303 val = TREE_INT_CST (arg01)
6304 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6305 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6306 neg_overflow = false;
6308 if (unsigned_p)
6310 tmp = int_const_binop (MINUS_EXPR, arg01,
6311 build_int_cst (TREE_TYPE (arg01), 1));
6312 lo = prod;
6314 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6315 val = TREE_INT_CST (prod)
6316 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6317 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6318 -1, overflow | TREE_OVERFLOW (prod));
6320 else if (tree_int_cst_sgn (arg01) >= 0)
6322 tmp = int_const_binop (MINUS_EXPR, arg01,
6323 build_int_cst (TREE_TYPE (arg01), 1));
6324 switch (tree_int_cst_sgn (arg1))
6326 case -1:
6327 neg_overflow = true;
6328 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6329 hi = prod;
6330 break;
6332 case 0:
6333 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6334 hi = tmp;
6335 break;
6337 case 1:
6338 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6339 lo = prod;
6340 break;
6342 default:
6343 gcc_unreachable ();
6346 else
6348 /* A negative divisor reverses the relational operators. */
6349 code = swap_tree_comparison (code);
6351 tmp = int_const_binop (PLUS_EXPR, arg01,
6352 build_int_cst (TREE_TYPE (arg01), 1));
6353 switch (tree_int_cst_sgn (arg1))
6355 case -1:
6356 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6357 lo = prod;
6358 break;
6360 case 0:
6361 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6362 lo = tmp;
6363 break;
6365 case 1:
6366 neg_overflow = true;
6367 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6368 hi = prod;
6369 break;
6371 default:
6372 gcc_unreachable ();
6376 switch (code)
6378 case EQ_EXPR:
6379 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6380 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6381 if (TREE_OVERFLOW (hi))
6382 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6383 if (TREE_OVERFLOW (lo))
6384 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6385 return build_range_check (loc, type, arg00, 1, lo, hi);
6387 case NE_EXPR:
6388 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6389 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6390 if (TREE_OVERFLOW (hi))
6391 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6392 if (TREE_OVERFLOW (lo))
6393 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6394 return build_range_check (loc, type, arg00, 0, lo, hi);
6396 case LT_EXPR:
6397 if (TREE_OVERFLOW (lo))
6399 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6400 return omit_one_operand_loc (loc, type, tmp, arg00);
6402 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6404 case LE_EXPR:
6405 if (TREE_OVERFLOW (hi))
6407 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6408 return omit_one_operand_loc (loc, type, tmp, arg00);
6410 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6412 case GT_EXPR:
6413 if (TREE_OVERFLOW (hi))
6415 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6416 return omit_one_operand_loc (loc, type, tmp, arg00);
6418 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6420 case GE_EXPR:
6421 if (TREE_OVERFLOW (lo))
6423 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6424 return omit_one_operand_loc (loc, type, tmp, arg00);
6426 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6428 default:
6429 break;
6432 return NULL_TREE;
6436 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6437 equality/inequality test, then return a simplified form of the test
6438 using a sign test. Otherwise return NULL. TYPE is the desired
6439 result type. */
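/* For example, for a 32-bit unsigned A whose corresponding signed type is
   int, (A & 0x80000000) != 0 becomes (int) A < 0 and (A & 0x80000000) == 0
   becomes (int) A >= 0.  */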
6441 static tree
6442 fold_single_bit_test_into_sign_test (location_t loc,
6443 enum tree_code code, tree arg0, tree arg1,
6444 tree result_type)
6446 /* If this is testing a single bit, we can optimize the test. */
6447 if ((code == NE_EXPR || code == EQ_EXPR)
6448 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6449 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6451 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6452 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6453 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6455 if (arg00 != NULL_TREE
6456 /* This is only a win if casting to a signed type is cheap,
6457 i.e. when arg00's type is not a partial mode. */
6458 && TYPE_PRECISION (TREE_TYPE (arg00))
6459 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6461 tree stype = signed_type_for (TREE_TYPE (arg00));
6462 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6463 result_type,
6464 fold_convert_loc (loc, stype, arg00),
6465 build_int_cst (stype, 0));
6469 return NULL_TREE;
6472 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6473 equality/inequality test, then return a simplified form of
6474 the test using shifts and logical operations. Otherwise return
6475 NULL. TYPE is the desired result type. */
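/* For example, (A & 8) != 0 becomes ((A >> 3) & 1) and (A & 8) == 0
   becomes (((A >> 3) ^ 1) & 1), modulo conversions to the intermediate
   type, unless the sign-test transformation above applies first.  */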
6477 tree
6478 fold_single_bit_test (location_t loc, enum tree_code code,
6479 tree arg0, tree arg1, tree result_type)
6481 /* If this is testing a single bit, we can optimize the test. */
6482 if ((code == NE_EXPR || code == EQ_EXPR)
6483 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6484 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6486 tree inner = TREE_OPERAND (arg0, 0);
6487 tree type = TREE_TYPE (arg0);
6488 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6489 enum machine_mode operand_mode = TYPE_MODE (type);
6490 int ops_unsigned;
6491 tree signed_type, unsigned_type, intermediate_type;
6492 tree tem, one;
6494 /* First, see if we can fold the single bit test into a sign-bit
6495 test. */
6496 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6497 result_type);
6498 if (tem)
6499 return tem;
6501 /* Otherwise we have (A & C) != 0 where C is a single bit,
6502 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6503 Similarly for (A & C) == 0. */
6505 /* If INNER is a right shift of a constant and it plus BITNUM does
6506 not overflow, adjust BITNUM and INNER. */
6507 if (TREE_CODE (inner) == RSHIFT_EXPR
6508 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6509 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6510 && bitnum < TYPE_PRECISION (type)
6511 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6512 bitnum - TYPE_PRECISION (type)))
6514 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6515 inner = TREE_OPERAND (inner, 0);
6518 /* If we are going to be able to omit the AND below, we must do our
6519 operations as unsigned. If we must use the AND, we have a choice.
6520 Normally unsigned is faster, but for some machines signed is. */
6521 #ifdef LOAD_EXTEND_OP
6522 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6523 && !flag_syntax_only) ? 0 : 1;
6524 #else
6525 ops_unsigned = 1;
6526 #endif
6528 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6529 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6530 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6531 inner = fold_convert_loc (loc, intermediate_type, inner);
6533 if (bitnum != 0)
6534 inner = build2 (RSHIFT_EXPR, intermediate_type,
6535 inner, size_int (bitnum));
6537 one = build_int_cst (intermediate_type, 1);
6539 if (code == EQ_EXPR)
6540 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6542 /* Put the AND last so it can combine with more things. */
6543 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6545 /* Make sure to return the proper type. */
6546 inner = fold_convert_loc (loc, result_type, inner);
6548 return inner;
6550 return NULL_TREE;
6553 /* Check whether we are allowed to reorder operands arg0 and arg1,
6554 such that the evaluation of arg1 occurs before arg0. */
6556 static bool
6557 reorder_operands_p (const_tree arg0, const_tree arg1)
6559 if (! flag_evaluation_order)
6560 return true;
6561 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6562 return true;
6563 return ! TREE_SIDE_EFFECTS (arg0)
6564 && ! TREE_SIDE_EFFECTS (arg1);
6567 /* Test whether it is preferable to swap two operands, ARG0 and
6568 ARG1, for example because ARG0 is an integer constant and ARG1
6569 isn't. If REORDER is true, only recommend swapping if we can
6570 evaluate the operands in reverse order. */
6572 bool
6573 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6575 STRIP_SIGN_NOPS (arg0);
6576 STRIP_SIGN_NOPS (arg1);
6578 if (TREE_CODE (arg1) == INTEGER_CST)
6579 return 0;
6580 if (TREE_CODE (arg0) == INTEGER_CST)
6581 return 1;
6583 if (TREE_CODE (arg1) == REAL_CST)
6584 return 0;
6585 if (TREE_CODE (arg0) == REAL_CST)
6586 return 1;
6588 if (TREE_CODE (arg1) == FIXED_CST)
6589 return 0;
6590 if (TREE_CODE (arg0) == FIXED_CST)
6591 return 1;
6593 if (TREE_CODE (arg1) == COMPLEX_CST)
6594 return 0;
6595 if (TREE_CODE (arg0) == COMPLEX_CST)
6596 return 1;
6598 if (TREE_CONSTANT (arg1))
6599 return 0;
6600 if (TREE_CONSTANT (arg0))
6601 return 1;
6603 if (optimize_function_for_size_p (cfun))
6604 return 0;
6606 if (reorder && flag_evaluation_order
6607 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6608 return 0;
6610 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6611 for commutative and comparison operators. Ensuring a canonical
6612 form allows the optimizers to find additional redundancies without
6613 having to explicitly check for both orderings. */
6614 if (TREE_CODE (arg0) == SSA_NAME
6615 && TREE_CODE (arg1) == SSA_NAME
6616 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6617 return 1;
6619 /* Put SSA_NAMEs last. */
6620 if (TREE_CODE (arg1) == SSA_NAME)
6621 return 0;
6622 if (TREE_CODE (arg0) == SSA_NAME)
6623 return 1;
6625 /* Put variables last. */
6626 if (DECL_P (arg1))
6627 return 0;
6628 if (DECL_P (arg0))
6629 return 1;
6631 return 0;
6634 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6635 ARG0 is extended to a wider type. */
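/* For example, assuming a 16-bit short, if S has type short and has been
   widened to int, (int) S == 100000 is known to be false and
   (int) S != 100000 known to be true, since 100000 does not fit in short;
   constants that do fit are simply compared in the narrower type.  */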
6637 static tree
6638 fold_widened_comparison (location_t loc, enum tree_code code,
6639 tree type, tree arg0, tree arg1)
6641 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6642 tree arg1_unw;
6643 tree shorter_type, outer_type;
6644 tree min, max;
6645 bool above, below;
6647 if (arg0_unw == arg0)
6648 return NULL_TREE;
6649 shorter_type = TREE_TYPE (arg0_unw);
6651 #ifdef HAVE_canonicalize_funcptr_for_compare
6652 /* Disable this optimization if we're casting a function pointer
6653 type on targets that require function pointer canonicalization. */
6654 if (HAVE_canonicalize_funcptr_for_compare
6655 && TREE_CODE (shorter_type) == POINTER_TYPE
6656 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6657 return NULL_TREE;
6658 #endif
6660 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6661 return NULL_TREE;
6663 arg1_unw = get_unwidened (arg1, NULL_TREE);
6665 /* If possible, express the comparison in the shorter mode. */
6666 if ((code == EQ_EXPR || code == NE_EXPR
6667 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6668 && (TREE_TYPE (arg1_unw) == shorter_type
6669 || ((TYPE_PRECISION (shorter_type)
6670 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6671 && (TYPE_UNSIGNED (shorter_type)
6672 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6673 || (TREE_CODE (arg1_unw) == INTEGER_CST
6674 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6675 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6676 && int_fits_type_p (arg1_unw, shorter_type))))
6677 return fold_build2_loc (loc, code, type, arg0_unw,
6678 fold_convert_loc (loc, shorter_type, arg1_unw));
6680 if (TREE_CODE (arg1_unw) != INTEGER_CST
6681 || TREE_CODE (shorter_type) != INTEGER_TYPE
6682 || !int_fits_type_p (arg1_unw, shorter_type))
6683 return NULL_TREE;
6685 /* If we are comparing with an integer that does not fit into the range
6686 of the shorter type, the result is known. */
6687 outer_type = TREE_TYPE (arg1_unw);
6688 min = lower_bound_in_type (outer_type, shorter_type);
6689 max = upper_bound_in_type (outer_type, shorter_type);
6691 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6692 max, arg1_unw));
6693 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6694 arg1_unw, min));
6696 switch (code)
6698 case EQ_EXPR:
6699 if (above || below)
6700 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6701 break;
6703 case NE_EXPR:
6704 if (above || below)
6705 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6706 break;
6708 case LT_EXPR:
6709 case LE_EXPR:
6710 if (above)
6711 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6712 else if (below)
6713 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6715 case GT_EXPR:
6716 case GE_EXPR:
6717 if (above)
6718 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6719 else if (below)
6720 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6722 default:
6723 break;
6726 return NULL_TREE;
6729 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6730 ARG0 just the signedness is changed. */
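/* For example, (unsigned int) x == 5 folds to x == 5 when x has type int:
   only the signedness of the operand changes, and equality comparisons
   are unaffected by it.  */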
6732 static tree
6733 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6734 tree arg0, tree arg1)
6736 tree arg0_inner;
6737 tree inner_type, outer_type;
6739 if (!CONVERT_EXPR_P (arg0))
6740 return NULL_TREE;
6742 outer_type = TREE_TYPE (arg0);
6743 arg0_inner = TREE_OPERAND (arg0, 0);
6744 inner_type = TREE_TYPE (arg0_inner);
6746 #ifdef HAVE_canonicalize_funcptr_for_compare
6747 /* Disable this optimization if we're casting a function pointer
6748 type on targets that require function pointer canonicalization. */
6749 if (HAVE_canonicalize_funcptr_for_compare
6750 && TREE_CODE (inner_type) == POINTER_TYPE
6751 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6752 return NULL_TREE;
6753 #endif
6755 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6756 return NULL_TREE;
6758 if (TREE_CODE (arg1) != INTEGER_CST
6759 && !(CONVERT_EXPR_P (arg1)
6760 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6761 return NULL_TREE;
6763 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6764 && code != NE_EXPR
6765 && code != EQ_EXPR)
6766 return NULL_TREE;
6768 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6769 return NULL_TREE;
6771 if (TREE_CODE (arg1) == INTEGER_CST)
6772 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6773 0, TREE_OVERFLOW (arg1));
6774 else
6775 arg1 = fold_convert_loc (loc, inner_type, arg1);
6777 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6780 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6781 the step of the array. Reconstructs s and delta in the case of s *
6782 delta being an integer constant (and thus already folded). ADDR is
6783 the address. OP1 is the multiplicative expression. If the
6784 function succeeds, the new address expression is returned.
6785 Otherwise NULL_TREE is returned. LOC is the location of the
6786 resulting expression. */
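/* For example, for an array of 4-byte elements, &a[i] p+ 4 * d can be
   rewritten as &a[i + d], and &a[i] p+ 8 as &a[i + 2], the constant
   offset being divided by the step.  */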
6788 static tree
6789 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6791 tree s, delta, step;
6792 tree ref = TREE_OPERAND (addr, 0), pref;
6793 tree ret, pos;
6794 tree itype;
6795 bool mdim = false;
6797 /* Strip the nops that might be added when converting op1 to sizetype. */
6798 STRIP_NOPS (op1);
6800 /* Canonicalize op1 into a possibly non-constant delta
6801 and an INTEGER_CST s. */
6802 if (TREE_CODE (op1) == MULT_EXPR)
6804 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6806 STRIP_NOPS (arg0);
6807 STRIP_NOPS (arg1);
6809 if (TREE_CODE (arg0) == INTEGER_CST)
6811 s = arg0;
6812 delta = arg1;
6814 else if (TREE_CODE (arg1) == INTEGER_CST)
6816 s = arg1;
6817 delta = arg0;
6819 else
6820 return NULL_TREE;
6822 else if (TREE_CODE (op1) == INTEGER_CST)
6824 delta = op1;
6825 s = NULL_TREE;
6827 else
6829 /* Treat op1 as delta * 1. */
6830 delta = op1;
6831 s = integer_one_node;
6834 /* Handle &x.array the same as we would handle &x.array[0]. */
6835 if (TREE_CODE (ref) == COMPONENT_REF
6836 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6838 tree domain;
6840 /* Remember if this was a multi-dimensional array. */
6841 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6842 mdim = true;
6844 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6845 if (! domain)
6846 goto cont;
6847 itype = TREE_TYPE (domain);
6849 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6850 if (TREE_CODE (step) != INTEGER_CST)
6851 goto cont;
6853 if (s)
6855 if (! tree_int_cst_equal (step, s))
6856 goto cont;
6858 else
6860 /* See whether delta is a multiple of step. */
6861 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6862 if (! tmp)
6863 goto cont;
6864 delta = tmp;
6867 /* Only fold here if we can verify we do not overflow one
6868 dimension of a multi-dimensional array. */
6869 if (mdim)
6871 tree tmp;
6873 if (!TYPE_MIN_VALUE (domain)
6874 || !TYPE_MAX_VALUE (domain)
6875 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6876 goto cont;
6878 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6879 fold_convert_loc (loc, itype,
6880 TYPE_MIN_VALUE (domain)),
6881 fold_convert_loc (loc, itype, delta));
6882 if (TREE_CODE (tmp) != INTEGER_CST
6883 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6884 goto cont;
6887 /* We found a suitable component reference. */
6889 pref = TREE_OPERAND (addr, 0);
6890 ret = copy_node (pref);
6891 SET_EXPR_LOCATION (ret, loc);
6893 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6894 fold_build2_loc
6895 (loc, PLUS_EXPR, itype,
6896 fold_convert_loc (loc, itype,
6897 TYPE_MIN_VALUE
6898 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6899 fold_convert_loc (loc, itype, delta)),
6900 NULL_TREE, NULL_TREE);
6901 return build_fold_addr_expr_loc (loc, ret);
6904 cont:
6906 for (;; ref = TREE_OPERAND (ref, 0))
6908 if (TREE_CODE (ref) == ARRAY_REF)
6910 tree domain;
6912 /* Remember if this was a multi-dimensional array. */
6913 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6914 mdim = true;
6916 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6917 if (! domain)
6918 continue;
6919 itype = TREE_TYPE (domain);
6921 step = array_ref_element_size (ref);
6922 if (TREE_CODE (step) != INTEGER_CST)
6923 continue;
6925 if (s)
6927 if (! tree_int_cst_equal (step, s))
6928 continue;
6930 else
6932 /* See whether delta is a multiple of step. */
6933 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6934 if (! tmp)
6935 continue;
6936 delta = tmp;
6939 /* Only fold here if we can verify we do not overflow one
6940 dimension of a multi-dimensional array. */
6941 if (mdim)
6943 tree tmp;
6945 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6946 || !TYPE_MAX_VALUE (domain)
6947 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6948 continue;
6950 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6951 fold_convert_loc (loc, itype,
6952 TREE_OPERAND (ref, 1)),
6953 fold_convert_loc (loc, itype, delta));
6954 if (!tmp
6955 || TREE_CODE (tmp) != INTEGER_CST
6956 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6957 continue;
6960 break;
6962 else
6963 mdim = false;
6965 if (!handled_component_p (ref))
6966 return NULL_TREE;
6969 /* We found the suitable array reference. So copy everything up to it,
6970 and replace the index. */
6972 pref = TREE_OPERAND (addr, 0);
6973 ret = copy_node (pref);
6974 SET_EXPR_LOCATION (ret, loc);
6975 pos = ret;
6977 while (pref != ref)
6979 pref = TREE_OPERAND (pref, 0);
6980 TREE_OPERAND (pos, 0) = copy_node (pref);
6981 pos = TREE_OPERAND (pos, 0);
6984 TREE_OPERAND (pos, 1)
6985 = fold_build2_loc (loc, PLUS_EXPR, itype,
6986 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6987 fold_convert_loc (loc, itype, delta));
6988 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6992 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6993 means A >= Y && A != MAX, but in this case we know that
6994 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6996 static tree
6997 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6999 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7001 if (TREE_CODE (bound) == LT_EXPR)
7002 a = TREE_OPERAND (bound, 0);
7003 else if (TREE_CODE (bound) == GT_EXPR)
7004 a = TREE_OPERAND (bound, 1);
7005 else
7006 return NULL_TREE;
7008 typea = TREE_TYPE (a);
7009 if (!INTEGRAL_TYPE_P (typea)
7010 && !POINTER_TYPE_P (typea))
7011 return NULL_TREE;
7013 if (TREE_CODE (ineq) == LT_EXPR)
7015 a1 = TREE_OPERAND (ineq, 1);
7016 y = TREE_OPERAND (ineq, 0);
7018 else if (TREE_CODE (ineq) == GT_EXPR)
7020 a1 = TREE_OPERAND (ineq, 0);
7021 y = TREE_OPERAND (ineq, 1);
7023 else
7024 return NULL_TREE;
7026 if (TREE_TYPE (a1) != typea)
7027 return NULL_TREE;
7029 if (POINTER_TYPE_P (typea))
7031 /* Convert the pointer types into integer before taking the difference. */
7032 tree ta = fold_convert_loc (loc, ssizetype, a);
7033 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7034 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7036 else
7037 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7039 if (!diff || !integer_onep (diff))
7040 return NULL_TREE;
7042 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7045 /* Fold a sum or difference of at least one multiplication.
7046 Returns the folded tree or NULL if no simplification could be made. */
7048 static tree
7049 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7050 tree arg0, tree arg1)
7052 tree arg00, arg01, arg10, arg11;
7053 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7055 /* (A * C) +- (B * C) -> (A+-B) * C.
7056 (A * C) +- A -> A * (C+-1).
7057 We are most concerned about the case where C is a constant,
7058 but other combinations show up during loop reduction. Since
7059 it is not difficult, try all four possibilities. */
7061 if (TREE_CODE (arg0) == MULT_EXPR)
7063 arg00 = TREE_OPERAND (arg0, 0);
7064 arg01 = TREE_OPERAND (arg0, 1);
7066 else if (TREE_CODE (arg0) == INTEGER_CST)
7068 arg00 = build_one_cst (type);
7069 arg01 = arg0;
7071 else
7073 /* We cannot generate constant 1 for fract. */
7074 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7075 return NULL_TREE;
7076 arg00 = arg0;
7077 arg01 = build_one_cst (type);
7079 if (TREE_CODE (arg1) == MULT_EXPR)
7081 arg10 = TREE_OPERAND (arg1, 0);
7082 arg11 = TREE_OPERAND (arg1, 1);
7084 else if (TREE_CODE (arg1) == INTEGER_CST)
7086 arg10 = build_one_cst (type);
7087 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7088 the purpose of this canonicalization. */
7089 if (TREE_INT_CST_HIGH (arg1) == -1
7090 && negate_expr_p (arg1)
7091 && code == PLUS_EXPR)
7093 arg11 = negate_expr (arg1);
7094 code = MINUS_EXPR;
7096 else
7097 arg11 = arg1;
7099 else
7101 /* We cannot generate constant 1 for fract. */
7102 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7103 return NULL_TREE;
7104 arg10 = arg1;
7105 arg11 = build_one_cst (type);
7107 same = NULL_TREE;
7109 if (operand_equal_p (arg01, arg11, 0))
7110 same = arg01, alt0 = arg00, alt1 = arg10;
7111 else if (operand_equal_p (arg00, arg10, 0))
7112 same = arg00, alt0 = arg01, alt1 = arg11;
7113 else if (operand_equal_p (arg00, arg11, 0))
7114 same = arg00, alt0 = arg01, alt1 = arg10;
7115 else if (operand_equal_p (arg01, arg10, 0))
7116 same = arg01, alt0 = arg00, alt1 = arg11;
7118 /* No identical multiplicands; see if we can find a common
7119 power-of-two factor in non-power-of-two multiplies. This
7120 can help in multi-dimensional array access. */
7121 else if (host_integerp (arg01, 0)
7122 && host_integerp (arg11, 0))
7124 HOST_WIDE_INT int01, int11, tmp;
7125 bool swap = false;
7126 tree maybe_same;
7127 int01 = TREE_INT_CST_LOW (arg01);
7128 int11 = TREE_INT_CST_LOW (arg11);
7130 /* Move min of absolute values to int11. */
7131 if (absu_hwi (int01) < absu_hwi (int11))
7133 tmp = int01, int01 = int11, int11 = tmp;
7134 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7135 maybe_same = arg01;
7136 swap = true;
7138 else
7139 maybe_same = arg11;
7141 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7142 /* The remainder should not be a constant, otherwise we
7143 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7144 increased the number of multiplications necessary. */
7145 && TREE_CODE (arg10) != INTEGER_CST)
7147 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7148 build_int_cst (TREE_TYPE (arg00),
7149 int01 / int11));
7150 alt1 = arg10;
7151 same = maybe_same;
7152 if (swap)
7153 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7157 if (same)
7158 return fold_build2_loc (loc, MULT_EXPR, type,
7159 fold_build2_loc (loc, code, type,
7160 fold_convert_loc (loc, type, alt0),
7161 fold_convert_loc (loc, type, alt1)),
7162 fold_convert_loc (loc, type, same));
7164 return NULL_TREE;
7167 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7168 specified by EXPR into the buffer PTR of length LEN bytes.
7169 Return the number of bytes placed in the buffer, or zero
7170 upon failure. */
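/* For example, on a typical little-endian target the 32-bit constant
   0x11223344 is encoded as the bytes 0x44 0x33 0x22 0x11; on a
   big-endian target the byte order is reversed.  */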
7172 static int
7173 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7175 tree type = TREE_TYPE (expr);
7176 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7177 int byte, offset, word, words;
7178 unsigned char value;
7180 if (total_bytes > len)
7181 return 0;
7182 words = total_bytes / UNITS_PER_WORD;
7184 for (byte = 0; byte < total_bytes; byte++)
7186 int bitpos = byte * BITS_PER_UNIT;
7187 if (bitpos < HOST_BITS_PER_WIDE_INT)
7188 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7189 else
7190 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7191 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7193 if (total_bytes > UNITS_PER_WORD)
7195 word = byte / UNITS_PER_WORD;
7196 if (WORDS_BIG_ENDIAN)
7197 word = (words - 1) - word;
7198 offset = word * UNITS_PER_WORD;
7199 if (BYTES_BIG_ENDIAN)
7200 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7201 else
7202 offset += byte % UNITS_PER_WORD;
7204 else
7205 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7206 ptr[offset] = value;
7208 return total_bytes;
7212 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7213 specified by EXPR into the buffer PTR of length LEN bytes.
7214 Return the number of bytes placed in the buffer, or zero
7215 upon failure. */
7217 static int
7218 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7220 tree type = TREE_TYPE (expr);
7221 enum machine_mode mode = TYPE_MODE (type);
7222 int total_bytes = GET_MODE_SIZE (mode);
7223 FIXED_VALUE_TYPE value;
7224 tree i_value, i_type;
7226 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7227 return 0;
7229 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7231 if (NULL_TREE == i_type
7232 || TYPE_PRECISION (i_type) != total_bytes)
7233 return 0;
7235 value = TREE_FIXED_CST (expr);
7236 i_value = double_int_to_tree (i_type, value.data);
7238 return native_encode_int (i_value, ptr, len);
7242 /* Subroutine of native_encode_expr. Encode the REAL_CST
7243 specified by EXPR into the buffer PTR of length LEN bytes.
7244 Return the number of bytes placed in the buffer, or zero
7245 upon failure. */
7247 static int
7248 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7250 tree type = TREE_TYPE (expr);
7251 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7252 int byte, offset, word, words, bitpos;
7253 unsigned char value;
7255 /* There are always 32 bits in each long, no matter the size of
7256 the host's long. We handle floating point representations with
7257 up to 192 bits. */
7258 long tmp[6];
7260 if (total_bytes > len)
7261 return 0;
7262 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7264 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7266 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7267 bitpos += BITS_PER_UNIT)
7269 byte = (bitpos / BITS_PER_UNIT) & 3;
7270 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7272 if (UNITS_PER_WORD < 4)
7274 word = byte / UNITS_PER_WORD;
7275 if (WORDS_BIG_ENDIAN)
7276 word = (words - 1) - word;
7277 offset = word * UNITS_PER_WORD;
7278 if (BYTES_BIG_ENDIAN)
7279 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7280 else
7281 offset += byte % UNITS_PER_WORD;
7283 else
7284 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7285 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7287 return total_bytes;
7290 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7291 specified by EXPR into the buffer PTR of length LEN bytes.
7292 Return the number of bytes placed in the buffer, or zero
7293 upon failure. */
7295 static int
7296 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7298 int rsize, isize;
7299 tree part;
7301 part = TREE_REALPART (expr);
7302 rsize = native_encode_expr (part, ptr, len);
7303 if (rsize == 0)
7304 return 0;
7305 part = TREE_IMAGPART (expr);
7306 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7307 if (isize != rsize)
7308 return 0;
7309 return rsize + isize;
7313 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7314 specified by EXPR into the buffer PTR of length LEN bytes.
7315 Return the number of bytes placed in the buffer, or zero
7316 upon failure. */
7318 static int
7319 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7321 unsigned i, count;
7322 int size, offset;
7323 tree itype, elem;
7325 offset = 0;
7326 count = VECTOR_CST_NELTS (expr);
7327 itype = TREE_TYPE (TREE_TYPE (expr));
7328 size = GET_MODE_SIZE (TYPE_MODE (itype));
7329 for (i = 0; i < count; i++)
7331 elem = VECTOR_CST_ELT (expr, i);
7332 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7333 return 0;
7334 offset += size;
7336 return offset;
7340 /* Subroutine of native_encode_expr. Encode the STRING_CST
7341 specified by EXPR into the buffer PTR of length LEN bytes.
7342 Return the number of bytes placed in the buffer, or zero
7343 upon failure. */
7345 static int
7346 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7348 tree type = TREE_TYPE (expr);
7349 HOST_WIDE_INT total_bytes;
7351 if (TREE_CODE (type) != ARRAY_TYPE
7352 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7353 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7354 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7355 return 0;
7356 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7357 if (total_bytes > len)
7358 return 0;
7359 if (TREE_STRING_LENGTH (expr) < total_bytes)
7361 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7362 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7363 total_bytes - TREE_STRING_LENGTH (expr));
7365 else
7366 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7367 return total_bytes;
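/* Worked example: encoding the STRING_CST "ab" whose type is char[4]
   copies the three bytes 'a', 'b', '\0' and zero-fills the fourth, so
   the buffer holds { 'a', 'b', 0, 0 } -- the declared array size wins
   and the tail is cleared. */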
7371 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7372 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7373 buffer PTR of length LEN bytes. Return the number of bytes
7374 placed in the buffer, or zero upon failure. */
7376 int
7377 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7379 switch (TREE_CODE (expr))
7381 case INTEGER_CST:
7382 return native_encode_int (expr, ptr, len);
7384 case REAL_CST:
7385 return native_encode_real (expr, ptr, len);
7387 case FIXED_CST:
7388 return native_encode_fixed (expr, ptr, len);
7390 case COMPLEX_CST:
7391 return native_encode_complex (expr, ptr, len);
7393 case VECTOR_CST:
7394 return native_encode_vector (expr, ptr, len);
7396 case STRING_CST:
7397 return native_encode_string (expr, ptr, len);
7399 default:
7400 return 0;
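/* Illustrative sketch, not part of GCC: typical use of the dispatcher
   above ("cst" is a hypothetical constant tree). A zero return means
   the constant could not be rendered as target bytes. */
#if 0
  unsigned char buf[64];
  int len = native_encode_expr (cst, buf, sizeof (buf));
  if (len != 0)
    {
      /* buf[0 .. len-1] now holds the target-memory image of cst,
	 e.g. ready to be handed to native_interpret_expr.  */
    }
#endif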
7405 /* Subroutine of native_interpret_expr. Interpret the contents of
7406 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7407 If the buffer cannot be interpreted, return NULL_TREE. */
7409 static tree
7410 native_interpret_int (tree type, const unsigned char *ptr, int len)
7412 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7413 double_int result;
7415 if (total_bytes > len
7416 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7417 return NULL_TREE;
7419 result = double_int::from_buffer (ptr, total_bytes);
7421 return double_int_to_tree (type, result);
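/* Worked example: on a little-endian target the four bytes
   { 0x01, 0x00, 0x00, 0x00 } interpreted as a 32-bit integer yield
   the INTEGER_CST 1; double_int::from_buffer performs the
   byte-order-aware reassembly. */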
7425 /* Subroutine of native_interpret_expr. Interpret the contents of
7426 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7427 If the buffer cannot be interpreted, return NULL_TREE. */
7429 static tree
7430 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7432 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7433 double_int result;
7434 FIXED_VALUE_TYPE fixed_value;
7436 if (total_bytes > len
7437 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7438 return NULL_TREE;
7440 result = double_int::from_buffer (ptr, total_bytes);
7441 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7443 return build_fixed (type, fixed_value);
7447 /* Subroutine of native_interpret_expr. Interpret the contents of
7448 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7449 If the buffer cannot be interpreted, return NULL_TREE. */
7451 static tree
7452 native_interpret_real (tree type, const unsigned char *ptr, int len)
7454 enum machine_mode mode = TYPE_MODE (type);
7455 int total_bytes = GET_MODE_SIZE (mode);
7456 int byte, offset, word, words, bitpos;
7457 unsigned char value;
7458 /* There are always 32 bits in each long, no matter the size of
7459 the host's long. We handle floating point representations with
7460 up to 192 bits. */
7461 REAL_VALUE_TYPE r;
7462 long tmp[6];
7465 if (total_bytes > len || total_bytes > 24)
7466 return NULL_TREE;
7467 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7469 memset (tmp, 0, sizeof (tmp));
7470 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7471 bitpos += BITS_PER_UNIT)
7473 byte = (bitpos / BITS_PER_UNIT) & 3;
7474 if (UNITS_PER_WORD < 4)
7476 word = byte / UNITS_PER_WORD;
7477 if (WORDS_BIG_ENDIAN)
7478 word = (words - 1) - word;
7479 offset = word * UNITS_PER_WORD;
7480 if (BYTES_BIG_ENDIAN)
7481 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7482 else
7483 offset += byte % UNITS_PER_WORD;
7485 else
7486 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7487 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7489 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7492 real_from_target (&r, tmp, mode);
7493 return build_real (type, r);
7497 /* Subroutine of native_interpret_expr. Interpret the contents of
7498 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7499 If the buffer cannot be interpreted, return NULL_TREE. */
7501 static tree
7502 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7504 tree etype, rpart, ipart;
7505 int size;
7507 etype = TREE_TYPE (type);
7508 size = GET_MODE_SIZE (TYPE_MODE (etype));
7509 if (size * 2 > len)
7510 return NULL_TREE;
7511 rpart = native_interpret_expr (etype, ptr, size);
7512 if (!rpart)
7513 return NULL_TREE;
7514 ipart = native_interpret_expr (etype, ptr+size, size);
7515 if (!ipart)
7516 return NULL_TREE;
7517 return build_complex (type, rpart, ipart);
7521 /* Subroutine of native_interpret_expr. Interpret the contents of
7522 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7523 If the buffer cannot be interpreted, return NULL_TREE. */
7525 static tree
7526 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7528 tree etype, elem;
7529 int i, size, count;
7530 tree *elements;
7532 etype = TREE_TYPE (type);
7533 size = GET_MODE_SIZE (TYPE_MODE (etype));
7534 count = TYPE_VECTOR_SUBPARTS (type);
7535 if (size * count > len)
7536 return NULL_TREE;
7538 elements = XALLOCAVEC (tree, count);
7539 for (i = count - 1; i >= 0; i--)
7541 elem = native_interpret_expr (etype, ptr+(i*size), size);
7542 if (!elem)
7543 return NULL_TREE;
7544 elements[i] = elem;
7546 return build_vector (type, elements);
7550 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7551 the buffer PTR of length LEN as a constant of type TYPE. For
7552 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7553 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7554 return NULL_TREE. */
7556 tree
7557 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7559 switch (TREE_CODE (type))
7561 case INTEGER_TYPE:
7562 case ENUMERAL_TYPE:
7563 case BOOLEAN_TYPE:
7564 case POINTER_TYPE:
7565 case REFERENCE_TYPE:
7566 return native_interpret_int (type, ptr, len);
7568 case REAL_TYPE:
7569 return native_interpret_real (type, ptr, len);
7571 case FIXED_POINT_TYPE:
7572 return native_interpret_fixed (type, ptr, len);
7574 case COMPLEX_TYPE:
7575 return native_interpret_complex (type, ptr, len);
7577 case VECTOR_TYPE:
7578 return native_interpret_vector (type, ptr, len);
7580 default:
7581 return NULL_TREE;
7585 /* Returns true if we can interpret the contents of a native encoding
7586 as TYPE. */
7588 static bool
7589 can_native_interpret_type_p (tree type)
7591 switch (TREE_CODE (type))
7593 case INTEGER_TYPE:
7594 case ENUMERAL_TYPE:
7595 case BOOLEAN_TYPE:
7596 case POINTER_TYPE:
7597 case REFERENCE_TYPE:
7598 case FIXED_POINT_TYPE:
7599 case REAL_TYPE:
7600 case COMPLEX_TYPE:
7601 case VECTOR_TYPE:
7602 return true;
7603 default:
7604 return false;
7608 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7609 TYPE at compile-time. If we're unable to perform the conversion
7610 return NULL_TREE. */
7612 static tree
7613 fold_view_convert_expr (tree type, tree expr)
7615 /* We support up to 512-bit values (for V8DFmode). */
7616 unsigned char buffer[64];
7617 int len;
7619 /* Check that the host and target are sane. */
7620 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7621 return NULL_TREE;
7623 len = native_encode_expr (expr, buffer, sizeof (buffer));
7624 if (len == 0)
7625 return NULL_TREE;
7627 return native_interpret_expr (type, buffer, len);
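/* Worked example: on a target with IEEE single-precision floats,
   fold_view_convert_expr turns VIEW_CONVERT_EXPR<int>(1.0f) into the
   INTEGER_CST 0x3f800000 -- the REAL_CST is encoded to its four
   target bytes and those bytes are reinterpreted as an integer. */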
7630 /* Build an expression for the address of T. Folds away INDIRECT_REF
7631 to avoid confusing the gimplify process. */
7633 tree
7634 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7636 /* The size of the object is not relevant when talking about its address. */
7637 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7638 t = TREE_OPERAND (t, 0);
7640 if (TREE_CODE (t) == INDIRECT_REF)
7642 t = TREE_OPERAND (t, 0);
7644 if (TREE_TYPE (t) != ptrtype)
7645 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7647 else if (TREE_CODE (t) == MEM_REF
7648 && integer_zerop (TREE_OPERAND (t, 1)))
7649 return TREE_OPERAND (t, 0);
7650 else if (TREE_CODE (t) == MEM_REF
7651 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7652 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7653 TREE_OPERAND (t, 0),
7654 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7655 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7657 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7659 if (TREE_TYPE (t) != ptrtype)
7660 t = fold_convert_loc (loc, ptrtype, t);
7662 else
7663 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7665 return t;
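/* Examples of the folds above: &*p simplifies to p, &MEM[p + 0]
   (a MEM_REF with zero offset) simplifies to p modulo a pointer
   conversion, and a MEM_REF off a constant address folds to a
   constant pointer via POINTER_PLUS_EXPR. */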
7668 /* Build an expression for the address of T. */
7670 tree
7671 build_fold_addr_expr_loc (location_t loc, tree t)
7673 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7675 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7678 static bool vec_cst_ctor_to_array (tree, tree *);
7680 /* Fold a unary expression of code CODE and type TYPE with operand
7681 OP0. Return the folded expression if folding is successful.
7682 Otherwise, return NULL_TREE. */
7684 tree
7685 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7687 tree tem;
7688 tree arg0;
7689 enum tree_code_class kind = TREE_CODE_CLASS (code);
7691 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7692 && TREE_CODE_LENGTH (code) == 1);
7694 arg0 = op0;
7695 if (arg0)
7697 if (CONVERT_EXPR_CODE_P (code)
7698 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7700 /* Don't use STRIP_NOPS, because signedness of argument type
7701 matters. */
7702 STRIP_SIGN_NOPS (arg0);
7704 else
7706 /* Strip any conversions that don't change the mode. This
7707 is safe for every expression, except for a comparison
7708 expression because its signedness is derived from its
7709 operands.
7711 Note that this is done as an internal manipulation within
7712 the constant folder, in order to find the simplest
7713 representation of the arguments so that their form can be
7714 studied. In any case, the appropriate type conversions
7715 should be put back in the tree that will get out of the
7716 constant folder. */
7717 STRIP_NOPS (arg0);
7721 if (TREE_CODE_CLASS (code) == tcc_unary)
7723 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7724 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7725 fold_build1_loc (loc, code, type,
7726 fold_convert_loc (loc, TREE_TYPE (op0),
7727 TREE_OPERAND (arg0, 1))));
7728 else if (TREE_CODE (arg0) == COND_EXPR)
7730 tree arg01 = TREE_OPERAND (arg0, 1);
7731 tree arg02 = TREE_OPERAND (arg0, 2);
7732 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7733 arg01 = fold_build1_loc (loc, code, type,
7734 fold_convert_loc (loc,
7735 TREE_TYPE (op0), arg01));
7736 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7737 arg02 = fold_build1_loc (loc, code, type,
7738 fold_convert_loc (loc,
7739 TREE_TYPE (op0), arg02));
7740 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7741 arg01, arg02);
7743 /* If this was a conversion, and all we did was to move it
7744 inside the COND_EXPR, bring it back out. But leave it if
7745 it is a conversion from integer to integer and the
7746 result precision is no wider than a word since such a
7747 conversion is cheap and may be optimized away by combine,
7748 while it couldn't if it were outside the COND_EXPR. Then return
7749 so we don't get into an infinite recursion loop taking the
7750 conversion out and then back in. */
7752 if ((CONVERT_EXPR_CODE_P (code)
7753 || code == NON_LVALUE_EXPR)
7754 && TREE_CODE (tem) == COND_EXPR
7755 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7756 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7757 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7758 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7759 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7760 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7761 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7762 && (INTEGRAL_TYPE_P
7763 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7764 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7765 || flag_syntax_only))
7766 tem = build1_loc (loc, code, type,
7767 build3 (COND_EXPR,
7768 TREE_TYPE (TREE_OPERAND
7769 (TREE_OPERAND (tem, 1), 0)),
7770 TREE_OPERAND (tem, 0),
7771 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7772 TREE_OPERAND (TREE_OPERAND (tem, 2),
7773 0)));
7774 return tem;
7778 switch (code)
7780 case PAREN_EXPR:
7781 /* Re-association barriers around constants and other re-association
7782 barriers can be removed. */
7783 if (CONSTANT_CLASS_P (op0)
7784 || TREE_CODE (op0) == PAREN_EXPR)
7785 return fold_convert_loc (loc, type, op0);
7786 return NULL_TREE;
7788 CASE_CONVERT:
7789 case FLOAT_EXPR:
7790 case FIX_TRUNC_EXPR:
7791 if (TREE_TYPE (op0) == type)
7792 return op0;
7794 if (COMPARISON_CLASS_P (op0))
7796 /* If we have (type) (a CMP b) and type is an integral type, return
7797 new expression involving the new type. Canonicalize
7798 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7799 non-integral type.
7800 Do not fold the result, as that would not simplify further;
7801 folding again would also result in infinite recursion. */
7802 if (TREE_CODE (type) == BOOLEAN_TYPE)
7803 return build2_loc (loc, TREE_CODE (op0), type,
7804 TREE_OPERAND (op0, 0),
7805 TREE_OPERAND (op0, 1));
7806 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7807 && TREE_CODE (type) != VECTOR_TYPE)
7808 return build3_loc (loc, COND_EXPR, type, op0,
7809 constant_boolean_node (true, type),
7810 constant_boolean_node (false, type));
7813 /* Handle cases of two conversions in a row. */
7814 if (CONVERT_EXPR_P (op0))
7816 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7817 tree inter_type = TREE_TYPE (op0);
7818 int inside_int = INTEGRAL_TYPE_P (inside_type);
7819 int inside_ptr = POINTER_TYPE_P (inside_type);
7820 int inside_float = FLOAT_TYPE_P (inside_type);
7821 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7822 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7823 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7824 int inter_int = INTEGRAL_TYPE_P (inter_type);
7825 int inter_ptr = POINTER_TYPE_P (inter_type);
7826 int inter_float = FLOAT_TYPE_P (inter_type);
7827 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7828 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7829 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7830 int final_int = INTEGRAL_TYPE_P (type);
7831 int final_ptr = POINTER_TYPE_P (type);
7832 int final_float = FLOAT_TYPE_P (type);
7833 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7834 unsigned int final_prec = TYPE_PRECISION (type);
7835 int final_unsignedp = TYPE_UNSIGNED (type);
7837 /* In addition to the cases of two conversions in a row
7838 handled below, if we are converting something to its own
7839 type via an object of identical or wider precision, neither
7840 conversion is needed. */
7841 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7842 && (((inter_int || inter_ptr) && final_int)
7843 || (inter_float && final_float))
7844 && inter_prec >= final_prec)
7845 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7847 /* Likewise, if the intermediate and initial types are either both
7848 float or both integer, we don't need the middle conversion if the
7849 former is wider than the latter and doesn't change the signedness
7850 (for integers). Avoid this if the final type is a pointer since
7851 then we sometimes need the middle conversion. Likewise if the
7852 final type has a precision not equal to the size of its mode. */
7853 if (((inter_int && inside_int)
7854 || (inter_float && inside_float)
7855 || (inter_vec && inside_vec))
7856 && inter_prec >= inside_prec
7857 && (inter_float || inter_vec
7858 || inter_unsignedp == inside_unsignedp)
7859 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7860 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7861 && ! final_ptr
7862 && (! final_vec || inter_prec == inside_prec))
7863 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7865 /* If we have a sign-extension of a zero-extended value, we can
7866 replace that by a single zero-extension. Likewise if the
7867 final conversion does not change precision we can drop the
7868 intermediate conversion. */
7869 if (inside_int && inter_int && final_int
7870 && ((inside_prec < inter_prec && inter_prec < final_prec
7871 && inside_unsignedp && !inter_unsignedp)
7872 || final_prec == inter_prec))
7873 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
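/* Worked example: for unsigned char c, (int)(short)c folds to (int)c;
   the zero-extension to short followed by a sign-extension to int is
   equivalent to one zero-extension straight to int. */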
7875 /* Two conversions in a row are not needed unless:
7876 - some conversion is floating-point (overstrict for now), or
7877 - some conversion is a vector (overstrict for now), or
7878 - the intermediate type is narrower than both initial and
7879 final, or
7880 - the intermediate type and innermost type differ in signedness,
7881 and the outermost type is wider than the intermediate, or
7882 - the initial type is a pointer type and the precisions of the
7883 intermediate and final types differ, or
7884 - the final type is a pointer type and the precisions of the
7885 initial and intermediate types differ. */
7886 if (! inside_float && ! inter_float && ! final_float
7887 && ! inside_vec && ! inter_vec && ! final_vec
7888 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7889 && ! (inside_int && inter_int
7890 && inter_unsignedp != inside_unsignedp
7891 && inter_prec < final_prec)
7892 && ((inter_unsignedp && inter_prec > inside_prec)
7893 == (final_unsignedp && final_prec > inter_prec))
7894 && ! (inside_ptr && inter_prec != final_prec)
7895 && ! (final_ptr && inside_prec != inter_prec)
7896 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7897 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7898 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7901 /* Handle (T *)&A.B.C for A being of type T and B and C
7902 living at offset zero. This occurs frequently in
7903 C++ upcasting and then accessing the base. */
7904 if (TREE_CODE (op0) == ADDR_EXPR
7905 && POINTER_TYPE_P (type)
7906 && handled_component_p (TREE_OPERAND (op0, 0)))
7908 HOST_WIDE_INT bitsize, bitpos;
7909 tree offset;
7910 enum machine_mode mode;
7911 int unsignedp, volatilep;
7912 tree base = TREE_OPERAND (op0, 0);
7913 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7914 &mode, &unsignedp, &volatilep, false);
7915 /* If the reference was to a (constant) zero offset, we can use
7916 the address of the base if it has the same base type
7917 as the result type and the pointer type is unqualified. */
7918 if (! offset && bitpos == 0
7919 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7920 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7921 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7922 return fold_convert_loc (loc, type,
7923 build_fold_addr_expr_loc (loc, base));
7926 if (TREE_CODE (op0) == MODIFY_EXPR
7927 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7928 /* Detect assigning a bitfield. */
7929 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7930 && DECL_BIT_FIELD
7931 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7933 /* Don't leave an assignment inside a conversion
7934 unless assigning a bitfield. */
7935 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7936 /* First do the assignment, then return converted constant. */
7937 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7938 TREE_NO_WARNING (tem) = 1;
7939 TREE_USED (tem) = 1;
7940 return tem;
7943 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7944 constant (if x has signed type, the sign bit cannot be set
7945 in c). This folds extension into the BIT_AND_EXPR.
7946 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7947 very likely don't have maximal range for their precision and this
7948 transformation effectively doesn't preserve non-maximal ranges. */
7949 if (TREE_CODE (type) == INTEGER_TYPE
7950 && TREE_CODE (op0) == BIT_AND_EXPR
7951 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7953 tree and_expr = op0;
7954 tree and0 = TREE_OPERAND (and_expr, 0);
7955 tree and1 = TREE_OPERAND (and_expr, 1);
7956 int change = 0;
7958 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7959 || (TYPE_PRECISION (type)
7960 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7961 change = 1;
7962 else if (TYPE_PRECISION (TREE_TYPE (and1))
7963 <= HOST_BITS_PER_WIDE_INT
7964 && host_integerp (and1, 1))
7966 unsigned HOST_WIDE_INT cst;
7968 cst = tree_low_cst (and1, 1);
7969 cst &= (HOST_WIDE_INT) -1
7970 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7971 change = (cst == 0);
7972 #ifdef LOAD_EXTEND_OP
7973 if (change
7974 && !flag_syntax_only
7975 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7976 == ZERO_EXTEND))
7978 tree uns = unsigned_type_for (TREE_TYPE (and0));
7979 and0 = fold_convert_loc (loc, uns, and0);
7980 and1 = fold_convert_loc (loc, uns, and1);
7982 #endif
7984 if (change)
7986 tem = force_fit_type_double (type, tree_to_double_int (and1),
7987 0, TREE_OVERFLOW (and1));
7988 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7989 fold_convert_loc (loc, type, and0), tem);
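/* Worked example of the fold above: with x of a narrow unsigned type,
   (T) (x & 0x7f) becomes (T) x & 0x7f; the mask guarantees the value
   is unchanged by doing the extension first, so the conversion and
   the BIT_AND_EXPR commute. */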
7993 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7994 when one of the new casts will fold away. Conservatively we assume
7995 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7996 if (POINTER_TYPE_P (type)
7997 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7998 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7999 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8000 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8001 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8003 tree arg00 = TREE_OPERAND (arg0, 0);
8004 tree arg01 = TREE_OPERAND (arg0, 1);
8006 return fold_build_pointer_plus_loc
8007 (loc, fold_convert_loc (loc, type, arg00), arg01);
8010 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8011 of the same precision, and X is an integer type not narrower than
8012 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8013 if (INTEGRAL_TYPE_P (type)
8014 && TREE_CODE (op0) == BIT_NOT_EXPR
8015 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8016 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8017 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8019 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8020 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8021 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8022 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8023 fold_convert_loc (loc, type, tem));
8026 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8027 type of X and Y (integer types only). */
8028 if (INTEGRAL_TYPE_P (type)
8029 && TREE_CODE (op0) == MULT_EXPR
8030 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8031 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8033 /* Be careful not to introduce new overflows. */
8034 tree mult_type;
8035 if (TYPE_OVERFLOW_WRAPS (type))
8036 mult_type = type;
8037 else
8038 mult_type = unsigned_type_for (type);
8040 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8042 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8043 fold_convert_loc (loc, mult_type,
8044 TREE_OPERAND (op0, 0)),
8045 fold_convert_loc (loc, mult_type,
8046 TREE_OPERAND (op0, 1)));
8047 return fold_convert_loc (loc, type, tem);
8051 tem = fold_convert_const (code, type, op0);
8052 return tem ? tem : NULL_TREE;
8054 case ADDR_SPACE_CONVERT_EXPR:
8055 if (integer_zerop (arg0))
8056 return fold_convert_const (code, type, arg0);
8057 return NULL_TREE;
8059 case FIXED_CONVERT_EXPR:
8060 tem = fold_convert_const (code, type, arg0);
8061 return tem ? tem : NULL_TREE;
8063 case VIEW_CONVERT_EXPR:
8064 if (TREE_TYPE (op0) == type)
8065 return op0;
8066 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8067 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8068 type, TREE_OPERAND (op0, 0));
8069 if (TREE_CODE (op0) == MEM_REF)
8070 return fold_build2_loc (loc, MEM_REF, type,
8071 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8073 /* For integral conversions with the same precision or pointer
8074 conversions use a NOP_EXPR instead. */
8075 if ((INTEGRAL_TYPE_P (type)
8076 || POINTER_TYPE_P (type))
8077 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8078 || POINTER_TYPE_P (TREE_TYPE (op0)))
8079 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8080 return fold_convert_loc (loc, type, op0);
8082 /* Strip inner integral conversions that do not change the precision. */
8083 if (CONVERT_EXPR_P (op0)
8084 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8085 || POINTER_TYPE_P (TREE_TYPE (op0)))
8086 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8087 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8088 && (TYPE_PRECISION (TREE_TYPE (op0))
8089 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8090 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8091 type, TREE_OPERAND (op0, 0));
8093 return fold_view_convert_expr (type, op0);
8095 case NEGATE_EXPR:
8096 tem = fold_negate_expr (loc, arg0);
8097 if (tem)
8098 return fold_convert_loc (loc, type, tem);
8099 return NULL_TREE;
8101 case ABS_EXPR:
8102 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8103 return fold_abs_const (arg0, type);
8104 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8105 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8106 /* Convert fabs((double)float) into (double)fabsf(float). */
8107 else if (TREE_CODE (arg0) == NOP_EXPR
8108 && TREE_CODE (type) == REAL_TYPE)
8110 tree targ0 = strip_float_extensions (arg0);
8111 if (targ0 != arg0)
8112 return fold_convert_loc (loc, type,
8113 fold_build1_loc (loc, ABS_EXPR,
8114 TREE_TYPE (targ0),
8115 targ0));
8117 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8118 else if (TREE_CODE (arg0) == ABS_EXPR)
8119 return arg0;
8120 else if (tree_expr_nonnegative_p (arg0))
8121 return arg0;
8123 /* Strip sign ops from argument. */
8124 if (TREE_CODE (type) == REAL_TYPE)
8126 tem = fold_strip_sign_ops (arg0);
8127 if (tem)
8128 return fold_build1_loc (loc, ABS_EXPR, type,
8129 fold_convert_loc (loc, type, tem));
8131 return NULL_TREE;
8133 case CONJ_EXPR:
8134 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8135 return fold_convert_loc (loc, type, arg0);
8136 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8138 tree itype = TREE_TYPE (type);
8139 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8140 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8141 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8142 negate_expr (ipart));
8144 if (TREE_CODE (arg0) == COMPLEX_CST)
8146 tree itype = TREE_TYPE (type);
8147 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8148 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8149 return build_complex (type, rpart, negate_expr (ipart));
8151 if (TREE_CODE (arg0) == CONJ_EXPR)
8152 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8153 return NULL_TREE;
8155 case BIT_NOT_EXPR:
8156 if (TREE_CODE (arg0) == INTEGER_CST)
8157 return fold_not_const (arg0, type);
8158 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8159 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8160 /* Convert ~ (-A) to A - 1. */
8161 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8162 return fold_build2_loc (loc, MINUS_EXPR, type,
8163 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8164 build_int_cst (type, 1));
8165 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8166 else if (INTEGRAL_TYPE_P (type)
8167 && ((TREE_CODE (arg0) == MINUS_EXPR
8168 && integer_onep (TREE_OPERAND (arg0, 1)))
8169 || (TREE_CODE (arg0) == PLUS_EXPR
8170 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8171 return fold_build1_loc (loc, NEGATE_EXPR, type,
8172 fold_convert_loc (loc, type,
8173 TREE_OPERAND (arg0, 0)));
8174 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8175 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8176 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8177 fold_convert_loc (loc, type,
8178 TREE_OPERAND (arg0, 0)))))
8179 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8180 fold_convert_loc (loc, type,
8181 TREE_OPERAND (arg0, 1)));
8182 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8183 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8184 fold_convert_loc (loc, type,
8185 TREE_OPERAND (arg0, 1)))))
8186 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8187 fold_convert_loc (loc, type,
8188 TREE_OPERAND (arg0, 0)), tem);
8189 /* Perform BIT_NOT_EXPR on each element individually. */
8190 else if (TREE_CODE (arg0) == VECTOR_CST)
8192 tree *elements;
8193 tree elem;
8194 unsigned count = VECTOR_CST_NELTS (arg0), i;
8196 elements = XALLOCAVEC (tree, count);
8197 for (i = 0; i < count; i++)
8199 elem = VECTOR_CST_ELT (arg0, i);
8200 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8201 if (elem == NULL_TREE)
8202 break;
8203 elements[i] = elem;
8205 if (i == count)
8206 return build_vector (type, elements);
8209 return NULL_TREE;
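/* Worked examples for the BIT_NOT_EXPR folds above: ~(-x) becomes
   x - 1, ~(x - 1) becomes -x, and ~(x ^ y) becomes ~x ^ y whenever
   ~x simplifies (for instance when x is itself a BIT_NOT_EXPR). */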
8211 case TRUTH_NOT_EXPR:
8212 /* The argument to invert_truthvalue must have Boolean type. */
8213 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8214 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8216 /* Note that the operand of this must be an int
8217 and its values must be 0 or 1.
8218 ("true" is a fixed value perhaps depending on the language,
8219 but we don't handle values other than 1 correctly yet.) */
8220 tem = fold_truth_not_expr (loc, arg0);
8221 if (!tem)
8222 return NULL_TREE;
8223 return fold_convert_loc (loc, type, tem);
8225 case REALPART_EXPR:
8226 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8227 return fold_convert_loc (loc, type, arg0);
8228 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8229 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8230 TREE_OPERAND (arg0, 1));
8231 if (TREE_CODE (arg0) == COMPLEX_CST)
8232 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8233 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8235 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8236 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8237 fold_build1_loc (loc, REALPART_EXPR, itype,
8238 TREE_OPERAND (arg0, 0)),
8239 fold_build1_loc (loc, REALPART_EXPR, itype,
8240 TREE_OPERAND (arg0, 1)));
8241 return fold_convert_loc (loc, type, tem);
8243 if (TREE_CODE (arg0) == CONJ_EXPR)
8245 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8246 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8247 TREE_OPERAND (arg0, 0));
8248 return fold_convert_loc (loc, type, tem);
8250 if (TREE_CODE (arg0) == CALL_EXPR)
8252 tree fn = get_callee_fndecl (arg0);
8253 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8254 switch (DECL_FUNCTION_CODE (fn))
8256 CASE_FLT_FN (BUILT_IN_CEXPI):
8257 fn = mathfn_built_in (type, BUILT_IN_COS);
8258 if (fn)
8259 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8260 break;
8262 default:
8263 break;
8266 return NULL_TREE;
8268 case IMAGPART_EXPR:
8269 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8270 return build_zero_cst (type);
8271 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8272 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8273 TREE_OPERAND (arg0, 0));
8274 if (TREE_CODE (arg0) == COMPLEX_CST)
8275 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8276 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8278 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8279 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8280 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8281 TREE_OPERAND (arg0, 0)),
8282 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8283 TREE_OPERAND (arg0, 1)));
8284 return fold_convert_loc (loc, type, tem);
8286 if (TREE_CODE (arg0) == CONJ_EXPR)
8288 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8289 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8290 return fold_convert_loc (loc, type, negate_expr (tem));
8292 if (TREE_CODE (arg0) == CALL_EXPR)
8294 tree fn = get_callee_fndecl (arg0);
8295 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8296 switch (DECL_FUNCTION_CODE (fn))
8298 CASE_FLT_FN (BUILT_IN_CEXPI):
8299 fn = mathfn_built_in (type, BUILT_IN_SIN);
8300 if (fn)
8301 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8302 break;
8304 default:
8305 break;
8308 return NULL_TREE;
8310 case INDIRECT_REF:
8311 /* Fold *&X to X if X is an lvalue. */
8312 if (TREE_CODE (op0) == ADDR_EXPR)
8314 tree op00 = TREE_OPERAND (op0, 0);
8315 if ((TREE_CODE (op00) == VAR_DECL
8316 || TREE_CODE (op00) == PARM_DECL
8317 || TREE_CODE (op00) == RESULT_DECL)
8318 && !TREE_READONLY (op00))
8319 return op00;
8321 return NULL_TREE;
8323 case VEC_UNPACK_LO_EXPR:
8324 case VEC_UNPACK_HI_EXPR:
8325 case VEC_UNPACK_FLOAT_LO_EXPR:
8326 case VEC_UNPACK_FLOAT_HI_EXPR:
8328 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8329 tree *elts;
8330 enum tree_code subcode;
8332 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8333 if (TREE_CODE (arg0) != VECTOR_CST)
8334 return NULL_TREE;
8336 elts = XALLOCAVEC (tree, nelts * 2);
8337 if (!vec_cst_ctor_to_array (arg0, elts))
8338 return NULL_TREE;
8340 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8341 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8342 elts += nelts;
8344 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8345 subcode = NOP_EXPR;
8346 else
8347 subcode = FLOAT_EXPR;
8349 for (i = 0; i < nelts; i++)
8351 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8352 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8353 return NULL_TREE;
8356 return build_vector (type, elts);
8359 case REDUC_MIN_EXPR:
8360 case REDUC_MAX_EXPR:
8361 case REDUC_PLUS_EXPR:
8363 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8364 tree *elts;
8365 enum tree_code subcode;
8367 if (TREE_CODE (op0) != VECTOR_CST)
8368 return NULL_TREE;
8370 elts = XALLOCAVEC (tree, nelts);
8371 if (!vec_cst_ctor_to_array (op0, elts))
8372 return NULL_TREE;
8374 switch (code)
8376 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8377 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8378 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8379 default: gcc_unreachable ();
8382 for (i = 1; i < nelts; i++)
8384 elts[0] = const_binop (subcode, elts[0], elts[i]);
8385 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8386 return NULL_TREE;
8387 elts[i] = build_zero_cst (TREE_TYPE (type));
8390 return build_vector (type, elts);
8393 default:
8394 return NULL_TREE;
8395 } /* switch (code) */
8399 /* If the operation was a conversion do _not_ mark a resulting constant
8400 with TREE_OVERFLOW if the original constant was not. These conversions
8401 have implementation defined behavior and retaining the TREE_OVERFLOW
8402 flag here would confuse later passes such as VRP. */
8403 tree
8404 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8405 tree type, tree op0)
8407 tree res = fold_unary_loc (loc, code, type, op0);
8408 if (res
8409 && TREE_CODE (res) == INTEGER_CST
8410 && TREE_CODE (op0) == INTEGER_CST
8411 && CONVERT_EXPR_CODE_P (code))
8412 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8414 return res;
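/* Example: folding (signed char) 200 yields the INTEGER_CST -56; the
   wrapper above clears the TREE_OVERFLOW flag the narrowing would set,
   because such a conversion is implementation-defined rather than
   undefined, and the original constant 200 did not overflow. */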
8417 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8418 operands OP0 and OP1. LOC is the location of the resulting expression.
8419 ARG0 and ARG1 are OP0 and OP1 with their conversions (NOPs) stripped.
8420 Return the folded expression if folding is successful. Otherwise,
8421 return NULL_TREE. */
8422 static tree
8423 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8424 tree arg0, tree arg1, tree op0, tree op1)
8426 tree tem;
8428 /* We only do these simplifications if we are optimizing. */
8429 if (!optimize)
8430 return NULL_TREE;
8432 /* Check for things like (A || B) && (A || C). We can convert this
8433 to A || (B && C). Note that either operator can be any of the four
8434 truth and/or operations and the transformation will still be
8435 valid. Also note that we only care about order for the
8436 ANDIF and ORIF operators. If B contains side effects, this
8437 might change the truth-value of A. */
8438 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8439 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8440 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8441 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8442 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8443 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8445 tree a00 = TREE_OPERAND (arg0, 0);
8446 tree a01 = TREE_OPERAND (arg0, 1);
8447 tree a10 = TREE_OPERAND (arg1, 0);
8448 tree a11 = TREE_OPERAND (arg1, 1);
8449 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8450 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8451 && (code == TRUTH_AND_EXPR
8452 || code == TRUTH_OR_EXPR));
8454 if (operand_equal_p (a00, a10, 0))
8455 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8456 fold_build2_loc (loc, code, type, a01, a11));
8457 else if (commutative && operand_equal_p (a00, a11, 0))
8458 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8459 fold_build2_loc (loc, code, type, a01, a10));
8460 else if (commutative && operand_equal_p (a01, a10, 0))
8461 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8462 fold_build2_loc (loc, code, type, a00, a11));
8464 /* This case is tricky because we must either have commutative
8465 operators or else A10 must not have side-effects. */
8467 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8468 && operand_equal_p (a01, a11, 0))
8469 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8470 fold_build2_loc (loc, code, type, a00, a10),
8471 a01);
8474 /* See if we can build a range comparison. */
8475 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8476 return tem;
8478 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8479 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8481 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8482 if (tem)
8483 return fold_build2_loc (loc, code, type, tem, arg1);
8486 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8487 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8489 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8490 if (tem)
8491 return fold_build2_loc (loc, code, type, arg0, tem);
8494 /* Check for the possibility of merging component references. If our
8495 lhs is another similar operation, try to merge its rhs with our
8496 rhs. Then try to merge our lhs and rhs. */
8497 if (TREE_CODE (arg0) == code
8498 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8499 TREE_OPERAND (arg0, 1), arg1)))
8500 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8502 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8503 return tem;
8505 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8506 && (code == TRUTH_AND_EXPR
8507 || code == TRUTH_ANDIF_EXPR
8508 || code == TRUTH_OR_EXPR
8509 || code == TRUTH_ORIF_EXPR))
8511 enum tree_code ncode, icode;
8513 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8514 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8515 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8517 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8518 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8519 We don't want to pack more than two leaves into a non-IF
8520 AND/OR expression.
8521 If the tree code of the left-hand operand isn't an AND/OR-IF
8522 code and isn't equal to IF-CODE, we don't want to add the
8523 right-hand operand.
8524 If the inner right-hand side of the left-hand operand has
8525 side-effects, or isn't simple, we can't add to it, as otherwise we might destroy the if-sequence. */
8526 if (TREE_CODE (arg0) == icode
8527 && simple_operand_p_2 (arg1)
8528 /* Needed for sequence points, to handle trapping and
8529 side-effects. */
8530 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8532 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8533 arg1);
8534 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8535 tem);
8537 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8538 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8539 else if (TREE_CODE (arg1) == icode
8540 && simple_operand_p_2 (arg0)
8541 /* Needed for sequence points, to handle trapping and
8542 side-effects. */
8543 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8545 tem = fold_build2_loc (loc, ncode, type,
8546 arg0, TREE_OPERAND (arg1, 0));
8547 return fold_build2_loc (loc, icode, type, tem,
8548 TREE_OPERAND (arg1, 1));
8550 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8551 into (A OR B).
8552 For sequence point consistency, we need to check for trapping
8553 and side-effects. */
8554 else if (code == icode && simple_operand_p_2 (arg0)
8555 && simple_operand_p_2 (arg1))
8556 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8559 return NULL_TREE;
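/* Worked examples, using the comment notation above:

     (a || b) && (a || c)     =>   a || (b && c)
     ((A AND-IF B) AND C)     =>   (A AND-IF (B AND C))

   the second form only fires when the packed operands are simple and
   free of side-effects, so short-circuit semantics are preserved. */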
8562 /* Fold a binary expression of code CODE and type TYPE with operands
8563 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8564 Return the folded expression if folding is successful. Otherwise,
8565 return NULL_TREE. */
8567 static tree
8568 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8570 enum tree_code compl_code;
8572 if (code == MIN_EXPR)
8573 compl_code = MAX_EXPR;
8574 else if (code == MAX_EXPR)
8575 compl_code = MIN_EXPR;
8576 else
8577 gcc_unreachable ();
8579 /* MIN (MAX (a, b), b) == b. */
8580 if (TREE_CODE (op0) == compl_code
8581 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8582 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8584 /* MIN (MAX (b, a), b) == b. */
8585 if (TREE_CODE (op0) == compl_code
8586 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8587 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8588 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8590 /* MIN (a, MAX (a, b)) == a. */
8591 if (TREE_CODE (op1) == compl_code
8592 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8593 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8594 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8596 /* MIN (a, MAX (b, a)) == a. */
8597 if (TREE_CODE (op1) == compl_code
8598 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8599 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8600 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8602 return NULL_TREE;
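/* Worked example: MIN (MAX (a, b), b) folds to b, since the MAX
   already guarantees the result is at least b; the remaining cases
   above are the mirror images of this identity. */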
8605 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8606 by changing CODE to reduce the magnitude of constants involved in
8607 ARG0 of the comparison.
8608 Returns a canonicalized comparison tree if a simplification was
8609 possible, otherwise returns NULL_TREE.
8610 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8611 valid if signed overflow is undefined. */
8613 static tree
8614 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8615 tree arg0, tree arg1,
8616 bool *strict_overflow_p)
8618 enum tree_code code0 = TREE_CODE (arg0);
8619 tree t, cst0 = NULL_TREE;
8620 int sgn0;
8621 bool swap = false;
8623 /* Match A +- CST code arg1 and CST code arg1. We can change the
8624 first form only if overflow is undefined. */
8625 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8626 /* In principle pointers also have undefined overflow behavior,
8627 but that causes problems elsewhere. */
8628 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8629 && (code0 == MINUS_EXPR
8630 || code0 == PLUS_EXPR)
8631 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8632 || code0 == INTEGER_CST))
8633 return NULL_TREE;
8635 /* Identify the constant in arg0 and its sign. */
8636 if (code0 == INTEGER_CST)
8637 cst0 = arg0;
8638 else
8639 cst0 = TREE_OPERAND (arg0, 1);
8640 sgn0 = tree_int_cst_sgn (cst0);
8642 /* Overflowed constants and zero will cause problems. */
8643 if (integer_zerop (cst0)
8644 || TREE_OVERFLOW (cst0))
8645 return NULL_TREE;
8647 /* See if we can reduce the magnitude of the constant in
8648 arg0 by changing the comparison code. */
8649 if (code0 == INTEGER_CST)
8651 /* CST <= arg1 -> CST-1 < arg1. */
8652 if (code == LE_EXPR && sgn0 == 1)
8653 code = LT_EXPR;
8654 /* -CST < arg1 -> -CST-1 <= arg1. */
8655 else if (code == LT_EXPR && sgn0 == -1)
8656 code = LE_EXPR;
8657 /* CST > arg1 -> CST-1 >= arg1. */
8658 else if (code == GT_EXPR && sgn0 == 1)
8659 code = GE_EXPR;
8660 /* -CST >= arg1 -> -CST-1 > arg1. */
8661 else if (code == GE_EXPR && sgn0 == -1)
8662 code = GT_EXPR;
8663 else
8664 return NULL_TREE;
8665 /* arg1 code' CST' might be more canonical. */
8666 swap = true;
8668 else
8670 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8671 if (code == LT_EXPR
8672 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8673 code = LE_EXPR;
8674 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8675 else if (code == GT_EXPR
8676 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8677 code = GE_EXPR;
8678 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8679 else if (code == LE_EXPR
8680 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8681 code = LT_EXPR;
8682 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8683 else if (code == GE_EXPR
8684 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8685 code = GT_EXPR;
8686 else
8687 return NULL_TREE;
8688 *strict_overflow_p = true;
8691 /* Now build the constant reduced in magnitude. But not if that
8692 would produce one outside of its type's range. */
8693 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8694 && ((sgn0 == 1
8695 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8696 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8697 || (sgn0 == -1
8698 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8699 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8700 /* We cannot swap the comparison here as that would cause us to
8701 endlessly recurse. */
8702 return NULL_TREE;
8704 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8705 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8706 if (code0 != INTEGER_CST)
8707 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8708 t = fold_convert (TREE_TYPE (arg1), t);
8710 /* If swapping might yield a more canonical form, do so. */
8711 if (swap)
8712 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8713 else
8714 return fold_build2_loc (loc, code, type, t, arg1);
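/* Worked examples, assuming undefined signed overflow: a + 2 > i is
   canonicalized to a + 1 >= i (reducing the magnitude of the
   constant), and the bare-constant form 5 <= i is reduced and swapped
   to i > 4, putting the constant second. */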
8717 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8718 overflow further. Try to decrease the magnitude of constants involved
8719 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8720 and put sole constants at the second argument position.
8721 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8723 static tree
8724 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8725 tree arg0, tree arg1)
8727 tree t;
8728 bool strict_overflow_p;
8729 const char * const warnmsg = G_("assuming signed overflow does not occur "
8730 "when reducing constant in comparison");
8732 /* Try canonicalization by simplifying arg0. */
8733 strict_overflow_p = false;
8734 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8735 &strict_overflow_p);
8736 if (t)
8738 if (strict_overflow_p)
8739 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8740 return t;
8743 /* Try canonicalization by simplifying arg1 using the swapped
8744 comparison. */
8745 code = swap_tree_comparison (code);
8746 strict_overflow_p = false;
8747 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8748 &strict_overflow_p);
8749 if (t && strict_overflow_p)
8750 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8751 return t;
8754 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8755 space. This is used to avoid issuing overflow warnings for
8756 expressions like &p->x which cannot wrap. */
8758 static bool
8759 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8761 double_int di_offset, total;
8763 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8764 return true;
8766 if (bitpos < 0)
8767 return true;
8769 if (offset == NULL_TREE)
8770 di_offset = double_int_zero;
8771 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8772 return true;
8773 else
8774 di_offset = TREE_INT_CST (offset);
8776 bool overflow;
8777 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8778 total = di_offset.add_with_sign (units, true, &overflow);
8779 if (overflow)
8780 return true;
8782 if (total.high != 0)
8783 return true;
8785 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8786 if (size <= 0)
8787 return true;
8789 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8790 array. */
8791 if (TREE_CODE (base) == ADDR_EXPR)
8793 HOST_WIDE_INT base_size;
8795 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8796 if (base_size > 0 && size < base_size)
8797 size = base_size;
8800 return total.low > (unsigned HOST_WIDE_INT) size;
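/* Example: for "struct s *p", the address &p->f is p plus a constant
   bit position smaller than sizeof (struct s), so the sum cannot wrap
   and comparing p with &p->f needs no strict-overflow warning. */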
8803 /* Subroutine of fold_binary. This routine performs all of the
8804 transformations that are common to the equality/inequality
8805 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8806 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8807 fold_binary itself should go through fold_binary. Fold a comparison with
8808 tree code CODE and type TYPE with operands OP0 and OP1. Return
8809 the folded comparison or NULL_TREE. */
8811 static tree
8812 fold_comparison (location_t loc, enum tree_code code, tree type,
8813 tree op0, tree op1)
8815 tree arg0, arg1, tem;
8817 arg0 = op0;
8818 arg1 = op1;
8820 STRIP_SIGN_NOPS (arg0);
8821 STRIP_SIGN_NOPS (arg1);
8823 tem = fold_relational_const (code, type, arg0, arg1);
8824 if (tem != NULL_TREE)
8825 return tem;
8827 /* If one arg is a real or integer constant, put it last. */
8828 if (tree_swap_operands_p (arg0, arg1, true))
8829 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8831 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8832 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8833 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8834 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8835 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8836 && (TREE_CODE (arg1) == INTEGER_CST
8837 && !TREE_OVERFLOW (arg1)))
8839 tree const1 = TREE_OPERAND (arg0, 1);
8840 tree const2 = arg1;
8841 tree variable = TREE_OPERAND (arg0, 0);
8842 tree lhs;
8843 int lhs_add;
8844 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8846 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8847 TREE_TYPE (arg1), const2, const1);
8849 /* If the constant operation overflowed this can be
8850 simplified as a comparison against INT_MAX/INT_MIN. */
8851 if (TREE_CODE (lhs) == INTEGER_CST
8852 && TREE_OVERFLOW (lhs))
8854 int const1_sgn = tree_int_cst_sgn (const1);
8855 enum tree_code code2 = code;
8857 /* Get the sign of the constant on the lhs if the
8858 operation were VARIABLE + CONST1. */
8859 if (TREE_CODE (arg0) == MINUS_EXPR)
8860 const1_sgn = -const1_sgn;
8862 /* The sign of the constant determines if we overflowed
8863 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8864 Canonicalize to the INT_MIN overflow by swapping the comparison
8865 if necessary. */
8866 if (const1_sgn == -1)
8867 code2 = swap_tree_comparison (code);
8869 /* We can now look at the canonicalized case
8870 VARIABLE + 1 CODE2 INT_MIN
8871 and decide on the result. */
8872 if (code2 == LT_EXPR
8873 || code2 == LE_EXPR
8874 || code2 == EQ_EXPR)
8875 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8876 else if (code2 == NE_EXPR
8877 || code2 == GE_EXPR
8878 || code2 == GT_EXPR)
8879 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
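/* Worked example: for int x with undefined signed overflow,
   x - 1 > INT_MAX would become x > INT_MAX + 1; the addition
   overflows, so the whole comparison is decided right here and folds
   to false (while x - 1 <= INT_MAX folds to true). */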
8882 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8883 && (TREE_CODE (lhs) != INTEGER_CST
8884 || !TREE_OVERFLOW (lhs)))
8886 if (code != EQ_EXPR && code != NE_EXPR)
8887 fold_overflow_warning ("assuming signed overflow does not occur "
8888 "when changing X +- C1 cmp C2 to "
8889 "X cmp C1 +- C2",
8890 WARN_STRICT_OVERFLOW_COMPARISON);
8891 return fold_build2_loc (loc, code, type, variable, lhs);
8895 /* For comparisons of pointers we can decompose it to a compile time
8896 comparison of the base objects and the offsets into the object.
8897 This requires at least one operand being an ADDR_EXPR or a
8898 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8899 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8900 && (TREE_CODE (arg0) == ADDR_EXPR
8901 || TREE_CODE (arg1) == ADDR_EXPR
8902 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8903 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8905 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8906 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8907 enum machine_mode mode;
8908 int volatilep, unsignedp;
8909 bool indirect_base0 = false, indirect_base1 = false;
8911 /* Get base and offset for the access. Strip ADDR_EXPR for
8912 get_inner_reference, but put it back by stripping INDIRECT_REF
8913 off the base object if possible. indirect_baseN will be true
8914 if baseN is not an address but refers to the object itself. */
8915 base0 = arg0;
8916 if (TREE_CODE (arg0) == ADDR_EXPR)
8918 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8919 &bitsize, &bitpos0, &offset0, &mode,
8920 &unsignedp, &volatilep, false);
8921 if (TREE_CODE (base0) == INDIRECT_REF)
8922 base0 = TREE_OPERAND (base0, 0);
8923 else
8924 indirect_base0 = true;
8926 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8928 base0 = TREE_OPERAND (arg0, 0);
8929 STRIP_SIGN_NOPS (base0);
8930 if (TREE_CODE (base0) == ADDR_EXPR)
8932 base0 = TREE_OPERAND (base0, 0);
8933 indirect_base0 = true;
8935 offset0 = TREE_OPERAND (arg0, 1);
8936 if (host_integerp (offset0, 0))
8938 HOST_WIDE_INT off = size_low_cst (offset0);
8939 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8940 * BITS_PER_UNIT)
8941 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8943 bitpos0 = off * BITS_PER_UNIT;
8944 offset0 = NULL_TREE;
8949 base1 = arg1;
8950 if (TREE_CODE (arg1) == ADDR_EXPR)
8952 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8953 &bitsize, &bitpos1, &offset1, &mode,
8954 &unsignedp, &volatilep, false);
8955 if (TREE_CODE (base1) == INDIRECT_REF)
8956 base1 = TREE_OPERAND (base1, 0);
8957 else
8958 indirect_base1 = true;
8960 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8962 base1 = TREE_OPERAND (arg1, 0);
8963 STRIP_SIGN_NOPS (base1);
8964 if (TREE_CODE (base1) == ADDR_EXPR)
8966 base1 = TREE_OPERAND (base1, 0);
8967 indirect_base1 = true;
8969 offset1 = TREE_OPERAND (arg1, 1);
8970 if (host_integerp (offset1, 0))
8972 HOST_WIDE_INT off = size_low_cst (offset1);
8973 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8974 * BITS_PER_UNIT)
8975 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8977 bitpos1 = off * BITS_PER_UNIT;
8978 offset1 = NULL_TREE;
8983 /* A local variable can never be pointed to by
8984 the default SSA name of an incoming parameter. */
8985 if ((TREE_CODE (arg0) == ADDR_EXPR
8986 && indirect_base0
8987 && TREE_CODE (base0) == VAR_DECL
8988 && auto_var_in_fn_p (base0, current_function_decl)
8989 && !indirect_base1
8990 && TREE_CODE (base1) == SSA_NAME
8991 && SSA_NAME_IS_DEFAULT_DEF (base1)
8992 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8993 || (TREE_CODE (arg1) == ADDR_EXPR
8994 && indirect_base1
8995 && TREE_CODE (base1) == VAR_DECL
8996 && auto_var_in_fn_p (base1, current_function_decl)
8997 && !indirect_base0
8998 && TREE_CODE (base0) == SSA_NAME
8999 && SSA_NAME_IS_DEFAULT_DEF (base0)
9000 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9002 if (code == NE_EXPR)
9003 return constant_boolean_node (1, type);
9004 else if (code == EQ_EXPR)
9005 return constant_boolean_node (0, type);
9007 /* If we have equivalent bases we might be able to simplify. */
9008 else if (indirect_base0 == indirect_base1
9009 && operand_equal_p (base0, base1, 0))
9011 /* We can fold this expression to a constant if the non-constant
9012 offset parts are equal. */
9013 if ((offset0 == offset1
9014 || (offset0 && offset1
9015 && operand_equal_p (offset0, offset1, 0)))
9016 && (code == EQ_EXPR
9017 || code == NE_EXPR
9018 || (indirect_base0 && DECL_P (base0))
9019 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9022 if (code != EQ_EXPR
9023 && code != NE_EXPR
9024 && bitpos0 != bitpos1
9025 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9026 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9027 fold_overflow_warning (("assuming pointer wraparound does not "
9028 "occur when comparing P +- C1 with "
9029 "P +- C2"),
9030 WARN_STRICT_OVERFLOW_CONDITIONAL);
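/* For instance, with a local array a, &a[1] < &a[2] reduces to comparing
the constant bit positions and folds to true. */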
9032 switch (code)
9034 case EQ_EXPR:
9035 return constant_boolean_node (bitpos0 == bitpos1, type);
9036 case NE_EXPR:
9037 return constant_boolean_node (bitpos0 != bitpos1, type);
9038 case LT_EXPR:
9039 return constant_boolean_node (bitpos0 < bitpos1, type);
9040 case LE_EXPR:
9041 return constant_boolean_node (bitpos0 <= bitpos1, type);
9042 case GE_EXPR:
9043 return constant_boolean_node (bitpos0 >= bitpos1, type);
9044 case GT_EXPR:
9045 return constant_boolean_node (bitpos0 > bitpos1, type);
9046 default:;
9049 /* We can simplify the comparison to a comparison of the variable
9050 offset parts if the constant offset parts are equal.
9051 Be careful to use signed sizetype here because otherwise we
9052 mess with array offsets in the wrong way. This is possible
9053 because pointer arithmetic is restricted to remain within an
9054 object and overflow on pointer differences is undefined as of
9055 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9056 else if (bitpos0 == bitpos1
9057 && ((code == EQ_EXPR || code == NE_EXPR)
9058 || (indirect_base0 && DECL_P (base0))
9059 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9061 /* By converting to signed sizetype we cover middle-end pointer
9062 arithmetic which operates on unsigned pointer types of size
9063 type size and ARRAY_REF offsets which are properly sign or
9064 zero extended from their type in case it is narrower than
9065 sizetype. */
9066 if (offset0 == NULL_TREE)
9067 offset0 = build_int_cst (ssizetype, 0);
9068 else
9069 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9070 if (offset1 == NULL_TREE)
9071 offset1 = build_int_cst (ssizetype, 0);
9072 else
9073 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9075 if (code != EQ_EXPR
9076 && code != NE_EXPR
9077 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9078 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9079 fold_overflow_warning (("assuming pointer wraparound does not "
9080 "occur when comparing P +- C1 with "
9081 "P +- C2"),
9082 WARN_STRICT_OVERFLOW_COMPARISON);
9084 return fold_build2_loc (loc, code, type, offset0, offset1);
9087 /* For non-equal bases we can simplify if they are addresses
9088 of local binding decls or constants. */
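/* For example, with two distinct local variables x and y,
&x == &y folds to false and &x != &y folds to true. */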
9089 else if (indirect_base0 && indirect_base1
9090 /* We know that !operand_equal_p (base0, base1, 0)
9091 because the if condition was false. But make
9092 sure two decls are not the same. */
9093 && base0 != base1
9094 && TREE_CODE (arg0) == ADDR_EXPR
9095 && TREE_CODE (arg1) == ADDR_EXPR
9096 && (((TREE_CODE (base0) == VAR_DECL
9097 || TREE_CODE (base0) == PARM_DECL)
9098 && (targetm.binds_local_p (base0)
9099 || CONSTANT_CLASS_P (base1)))
9100 || CONSTANT_CLASS_P (base0))
9101 && (((TREE_CODE (base1) == VAR_DECL
9102 || TREE_CODE (base1) == PARM_DECL)
9103 && (targetm.binds_local_p (base1)
9104 || CONSTANT_CLASS_P (base0)))
9105 || CONSTANT_CLASS_P (base1)))
9107 if (code == EQ_EXPR)
9108 return omit_two_operands_loc (loc, type, boolean_false_node,
9109 arg0, arg1);
9110 else if (code == NE_EXPR)
9111 return omit_two_operands_loc (loc, type, boolean_true_node,
9112 arg0, arg1);
9114 /* For equal offsets we can simplify to a comparison of the
9115 base addresses. */
9116 else if (bitpos0 == bitpos1
9117 && (indirect_base0
9118 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9119 && (indirect_base1
9120 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9121 && ((offset0 == offset1)
9122 || (offset0 && offset1
9123 && operand_equal_p (offset0, offset1, 0))))
9125 if (indirect_base0)
9126 base0 = build_fold_addr_expr_loc (loc, base0);
9127 if (indirect_base1)
9128 base1 = build_fold_addr_expr_loc (loc, base1);
9129 return fold_build2_loc (loc, code, type, base0, base1);
9133 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9134 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9135 the resulting offset is smaller in absolute value than the
9136 original one. */
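/* For example, X + 3 < Y + 5 becomes X < Y + 2, since the combined
constant 2 is smaller in absolute value than the original constants. */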
9137 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9138 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9139 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9140 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9141 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9142 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9143 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9145 tree const1 = TREE_OPERAND (arg0, 1);
9146 tree const2 = TREE_OPERAND (arg1, 1);
9147 tree variable1 = TREE_OPERAND (arg0, 0);
9148 tree variable2 = TREE_OPERAND (arg1, 0);
9149 tree cst;
9150 const char * const warnmsg = G_("assuming signed overflow does not "
9151 "occur when combining constants around "
9152 "a comparison");
9154 /* Put the constant on the side where it doesn't overflow and is
9155 of lower absolute value than before. */
9156 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9157 ? MINUS_EXPR : PLUS_EXPR,
9158 const2, const1);
9159 if (!TREE_OVERFLOW (cst)
9160 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9162 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9163 return fold_build2_loc (loc, code, type,
9164 variable1,
9165 fold_build2_loc (loc,
9166 TREE_CODE (arg1), TREE_TYPE (arg1),
9167 variable2, cst));
9170 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9171 ? MINUS_EXPR : PLUS_EXPR,
9172 const1, const2);
9173 if (!TREE_OVERFLOW (cst)
9174 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9176 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9177 return fold_build2_loc (loc, code, type,
9178 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9179 variable1, cst),
9180 variable2);
9184 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9185 signed arithmetic case. That form is created by the compiler
9186 often enough for folding it to be of value. One example is in
9187 computing loop trip counts after Operator Strength Reduction. */
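/* For example, X * 4 > 0 becomes X > 0, while X * -4 > 0 becomes X < 0
because the comparison is swapped for a negative factor. */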
9188 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9189 && TREE_CODE (arg0) == MULT_EXPR
9190 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9191 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9192 && integer_zerop (arg1))
9194 tree const1 = TREE_OPERAND (arg0, 1);
9195 tree const2 = arg1; /* zero */
9196 tree variable1 = TREE_OPERAND (arg0, 0);
9197 enum tree_code cmp_code = code;
9199 /* Handle unfolded multiplication by zero. */
9200 if (integer_zerop (const1))
9201 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9203 fold_overflow_warning (("assuming signed overflow does not occur when "
9204 "eliminating multiplication in comparison "
9205 "with zero"),
9206 WARN_STRICT_OVERFLOW_COMPARISON);
9208 /* If const1 is negative we swap the sense of the comparison. */
9209 if (tree_int_cst_sgn (const1) < 0)
9210 cmp_code = swap_tree_comparison (cmp_code);
9212 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9215 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9216 if (tem)
9217 return tem;
9219 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9221 tree targ0 = strip_float_extensions (arg0);
9222 tree targ1 = strip_float_extensions (arg1);
9223 tree newtype = TREE_TYPE (targ0);
9225 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9226 newtype = TREE_TYPE (targ1);
9228 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9229 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9230 return fold_build2_loc (loc, code, type,
9231 fold_convert_loc (loc, newtype, targ0),
9232 fold_convert_loc (loc, newtype, targ1));
9234 /* (-a) CMP (-b) -> b CMP a */
9235 if (TREE_CODE (arg0) == NEGATE_EXPR
9236 && TREE_CODE (arg1) == NEGATE_EXPR)
9237 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9238 TREE_OPERAND (arg0, 0));
9240 if (TREE_CODE (arg1) == REAL_CST)
9242 REAL_VALUE_TYPE cst;
9243 cst = TREE_REAL_CST (arg1);
9245 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9246 if (TREE_CODE (arg0) == NEGATE_EXPR)
9247 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9248 TREE_OPERAND (arg0, 0),
9249 build_real (TREE_TYPE (arg1),
9250 real_value_negate (&cst)));
9252 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9253 /* a CMP (-0) -> a CMP 0 */
9254 if (REAL_VALUE_MINUS_ZERO (cst))
9255 return fold_build2_loc (loc, code, type, arg0,
9256 build_real (TREE_TYPE (arg1), dconst0));
9258 /* x != NaN is always true, other ops are always false. */
9259 if (REAL_VALUE_ISNAN (cst)
9260 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9262 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9263 return omit_one_operand_loc (loc, type, tem, arg0);
9266 /* Fold comparisons against infinity. */
9267 if (REAL_VALUE_ISINF (cst)
9268 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9270 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9271 if (tem != NULL_TREE)
9272 return tem;
9276 /* If this is a comparison of a real constant with a PLUS_EXPR
9277 or a MINUS_EXPR of a real constant, we can convert it into a
9278 comparison with a revised real constant, provided that no overflow
9279 occurs and unsafe_math_optimizations are enabled. */
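/* For example, x + 1.0 < 3.0 can become x < 2.0 here. */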
9280 if (flag_unsafe_math_optimizations
9281 && TREE_CODE (arg1) == REAL_CST
9282 && (TREE_CODE (arg0) == PLUS_EXPR
9283 || TREE_CODE (arg0) == MINUS_EXPR)
9284 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9285 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9286 ? MINUS_EXPR : PLUS_EXPR,
9287 arg1, TREE_OPERAND (arg0, 1)))
9288 && !TREE_OVERFLOW (tem))
9289 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9291 /* Likewise, we can simplify a comparison of a real constant with
9292 a MINUS_EXPR whose first operand is also a real constant, i.e.
9293 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9294 floating-point types only if -fassociative-math is set. */
9295 if (flag_associative_math
9296 && TREE_CODE (arg1) == REAL_CST
9297 && TREE_CODE (arg0) == MINUS_EXPR
9298 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9299 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9300 arg1))
9301 && !TREE_OVERFLOW (tem))
9302 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9303 TREE_OPERAND (arg0, 1), tem);
9305 /* Fold comparisons against built-in math functions. */
9306 if (TREE_CODE (arg1) == REAL_CST
9307 && flag_unsafe_math_optimizations
9308 && ! flag_errno_math)
9310 enum built_in_function fcode = builtin_mathfn_code (arg0);
9312 if (fcode != END_BUILTINS)
9314 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9315 if (tem != NULL_TREE)
9316 return tem;
9321 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9322 && CONVERT_EXPR_P (arg0))
9324 /* If we are widening one operand of an integer comparison,
9325 see if the other operand is similarly being widened. Perhaps we
9326 can do the comparison in the narrower type. */
9327 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9328 if (tem)
9329 return tem;
9331 /* Or if we are changing signedness. */
9332 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9333 if (tem)
9334 return tem;
9337 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9338 constant, we can simplify it. */
9339 if (TREE_CODE (arg1) == INTEGER_CST
9340 && (TREE_CODE (arg0) == MIN_EXPR
9341 || TREE_CODE (arg0) == MAX_EXPR)
9342 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9344 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9345 if (tem)
9346 return tem;
9349 /* Simplify comparison of something with itself. (For IEEE
9350 floating-point, we can only do some of these simplifications.) */
9351 if (operand_equal_p (arg0, arg1, 0))
9353 switch (code)
9355 case EQ_EXPR:
9356 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9357 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9358 return constant_boolean_node (1, type);
9359 break;
9361 case GE_EXPR:
9362 case LE_EXPR:
9363 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9364 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9365 return constant_boolean_node (1, type);
9366 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9368 case NE_EXPR:
9369 /* For NE, we can only do this simplification if the type is integer
9370 or we don't honor IEEE floating-point NaNs. */
9371 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9372 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9373 break;
9374 /* ... fall through ... */
9375 case GT_EXPR:
9376 case LT_EXPR:
9377 return constant_boolean_node (0, type);
9378 default:
9379 gcc_unreachable ();
9383 /* If we are comparing an expression that just has comparisons
9384 of two integer values, arithmetic expressions of those comparisons,
9385 and constants, we can simplify it. There are only three cases
9386 to check: the two values can either be equal, the first can be
9387 greater, or the second can be greater. Fold the expression for
9388 those three values. Since each value must be 0 or 1, we have
9389 eight possibilities, each of which corresponds to the constant 0
9390 or 1 or one of the six possible comparisons.
9392 This handles common cases like (a > b) == 0 but also handles
9393 expressions like ((x > y) - (y > x)) > 0, which supposedly
9394 occur in macroized code. */
9396 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9398 tree cval1 = 0, cval2 = 0;
9399 int save_p = 0;
9401 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9402 /* Don't handle degenerate cases here; they should already
9403 have been handled anyway. */
9404 && cval1 != 0 && cval2 != 0
9405 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9406 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9407 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9408 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9409 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9410 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9411 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9413 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9414 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9416 /* We can't just pass T to eval_subst in case cval1 or cval2
9417 was the same as ARG1. */
9419 tree high_result
9420 = fold_build2_loc (loc, code, type,
9421 eval_subst (loc, arg0, cval1, maxval,
9422 cval2, minval),
9423 arg1);
9424 tree equal_result
9425 = fold_build2_loc (loc, code, type,
9426 eval_subst (loc, arg0, cval1, maxval,
9427 cval2, maxval),
9428 arg1);
9429 tree low_result
9430 = fold_build2_loc (loc, code, type,
9431 eval_subst (loc, arg0, cval1, minval,
9432 cval2, maxval),
9433 arg1);
9435 /* All three of these results should be 0 or 1. Confirm they are.
9436 Then use those values to select the proper code to use. */
9438 if (TREE_CODE (high_result) == INTEGER_CST
9439 && TREE_CODE (equal_result) == INTEGER_CST
9440 && TREE_CODE (low_result) == INTEGER_CST)
9442 /* Make a 3-bit mask with the high-order bit being the
9443 value for `>', the next for '=', and the low for '<'. */
9444 switch ((integer_onep (high_result) * 4)
9445 + (integer_onep (equal_result) * 2)
9446 + integer_onep (low_result))
9448 case 0:
9449 /* Always false. */
9450 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9451 case 1:
9452 code = LT_EXPR;
9453 break;
9454 case 2:
9455 code = EQ_EXPR;
9456 break;
9457 case 3:
9458 code = LE_EXPR;
9459 break;
9460 case 4:
9461 code = GT_EXPR;
9462 break;
9463 case 5:
9464 code = NE_EXPR;
9465 break;
9466 case 6:
9467 code = GE_EXPR;
9468 break;
9469 case 7:
9470 /* Always true. */
9471 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9474 if (save_p)
9476 tem = save_expr (build2 (code, type, cval1, cval2));
9477 SET_EXPR_LOCATION (tem, loc);
9478 return tem;
9480 return fold_build2_loc (loc, code, type, cval1, cval2);
9485 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9486 into a single range test. */
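/* For example, for unsigned X, X / 4 == 2 is equivalent to the range test
8 <= X && X <= 11. */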
9487 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9488 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9489 && TREE_CODE (arg1) == INTEGER_CST
9490 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9491 && !integer_zerop (TREE_OPERAND (arg0, 1))
9492 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9493 && !TREE_OVERFLOW (arg1))
9495 tem = fold_div_compare (loc, code, type, arg0, arg1);
9496 if (tem != NULL_TREE)
9497 return tem;
9500 /* Fold ~X op ~Y as Y op X. */
9501 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9502 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9504 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9505 return fold_build2_loc (loc, code, type,
9506 fold_convert_loc (loc, cmp_type,
9507 TREE_OPERAND (arg1, 0)),
9508 TREE_OPERAND (arg0, 0));
9511 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9512 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9513 && TREE_CODE (arg1) == INTEGER_CST)
9515 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9516 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9517 TREE_OPERAND (arg0, 0),
9518 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9519 fold_convert_loc (loc, cmp_type, arg1)));
9522 return NULL_TREE;
9526 /* Subroutine of fold_binary. Optimize complex multiplications of the
9527 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9528 argument EXPR represents the expression "z" of type TYPE. */
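/* For z == a + b*i the result is the complex value (a*a + b*b, 0). */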
9530 static tree
9531 fold_mult_zconjz (location_t loc, tree type, tree expr)
9533 tree itype = TREE_TYPE (type);
9534 tree rpart, ipart, tem;
9536 if (TREE_CODE (expr) == COMPLEX_EXPR)
9538 rpart = TREE_OPERAND (expr, 0);
9539 ipart = TREE_OPERAND (expr, 1);
9541 else if (TREE_CODE (expr) == COMPLEX_CST)
9543 rpart = TREE_REALPART (expr);
9544 ipart = TREE_IMAGPART (expr);
9546 else
9548 expr = save_expr (expr);
9549 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9550 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9553 rpart = save_expr (rpart);
9554 ipart = save_expr (ipart);
9555 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9556 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9557 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9558 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9559 build_zero_cst (itype));
9563 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9564 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9565 guarantees that P and N have the same least significant log2(M) bits.
9566 N is not otherwise constrained. In particular, N is not normalized to
9567 0 <= N < M as is common. In general, the precise value of P is unknown.
9568 M is chosen as large as possible such that constant N can be determined.
9570 Returns M and sets *RESIDUE to N.
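For example, if EXPR is &x + 4 and x is known to be 16-byte aligned,
the result is M == 16 with *RESIDUE == 4.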
9572 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9573 account. This is not always possible due to PR 35705.
9576 static unsigned HOST_WIDE_INT
9577 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9578 bool allow_func_align)
9580 enum tree_code code;
9582 *residue = 0;
9584 code = TREE_CODE (expr);
9585 if (code == ADDR_EXPR)
9587 unsigned int bitalign;
9588 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9589 *residue /= BITS_PER_UNIT;
9590 return bitalign / BITS_PER_UNIT;
9592 else if (code == POINTER_PLUS_EXPR)
9594 tree op0, op1;
9595 unsigned HOST_WIDE_INT modulus;
9596 enum tree_code inner_code;
9598 op0 = TREE_OPERAND (expr, 0);
9599 STRIP_NOPS (op0);
9600 modulus = get_pointer_modulus_and_residue (op0, residue,
9601 allow_func_align);
9603 op1 = TREE_OPERAND (expr, 1);
9604 STRIP_NOPS (op1);
9605 inner_code = TREE_CODE (op1);
9606 if (inner_code == INTEGER_CST)
9608 *residue += TREE_INT_CST_LOW (op1);
9609 return modulus;
9611 else if (inner_code == MULT_EXPR)
9613 op1 = TREE_OPERAND (op1, 1);
9614 if (TREE_CODE (op1) == INTEGER_CST)
9616 unsigned HOST_WIDE_INT align;
9618 /* Compute the greatest power-of-2 divisor of op1. */
9619 align = TREE_INT_CST_LOW (op1);
9620 align &= -align;
9622 /* If align is non-zero and less than modulus, replace
9623 modulus with align. If align is 0, then either op1 is 0
9624 or the greatest power-of-2 divisor of op1 doesn't fit in an
9625 unsigned HOST_WIDE_INT. In either case, no additional
9626 constraint is imposed. */
9627 if (align)
9628 modulus = MIN (modulus, align);
9630 return modulus;
9635 /* If we get here, we were unable to determine anything useful about the
9636 expression. */
9637 return 1;
9640 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9641 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9643 static bool
9644 vec_cst_ctor_to_array (tree arg, tree *elts)
9646 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9648 if (TREE_CODE (arg) == VECTOR_CST)
9650 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9651 elts[i] = VECTOR_CST_ELT (arg, i);
9653 else if (TREE_CODE (arg) == CONSTRUCTOR)
9655 constructor_elt *elt;
9657 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9658 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9659 return false;
9660 else
9661 elts[i] = elt->value;
9663 else
9664 return false;
9665 for (; i < nelts; i++)
9666 elts[i]
9667 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9668 return true;
9671 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9672 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9673 NULL_TREE otherwise. */
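/* For example, with four-element vectors a selector of { 0, 4, 1, 5 }
interleaves the low halves of ARG0 and ARG1. */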
9675 static tree
9676 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9678 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9679 tree *elts;
9680 bool need_ctor = false;
9682 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9683 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9684 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9685 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9686 return NULL_TREE;
9688 elts = XALLOCAVEC (tree, nelts * 3);
9689 if (!vec_cst_ctor_to_array (arg0, elts)
9690 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9691 return NULL_TREE;
9693 for (i = 0; i < nelts; i++)
9695 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9696 need_ctor = true;
9697 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9700 if (need_ctor)
9702 vec<constructor_elt, va_gc> *v;
9703 vec_alloc (v, nelts);
9704 for (i = 0; i < nelts; i++)
9705 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9706 return build_constructor (type, v);
9708 else
9709 return build_vector (type, &elts[2 * nelts]);
9712 /* Try to fold a pointer difference of type TYPE between two address expressions of
9713 array references AREF0 and AREF1 using location LOC. Return a
9714 simplified expression for the difference or NULL_TREE. */
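/* For example, for &a[i] - &a[j] this computes (i - j) * sizeof (a[0]),
plus whatever difference the bases themselves contribute. */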
9716 static tree
9717 fold_addr_of_array_ref_difference (location_t loc, tree type,
9718 tree aref0, tree aref1)
9720 tree base0 = TREE_OPERAND (aref0, 0);
9721 tree base1 = TREE_OPERAND (aref1, 0);
9722 tree base_offset = build_int_cst (type, 0);
9724 /* If the bases are array references as well, recurse. If the bases
9725 are pointer indirections, compute the difference of the pointers.
9726 If the bases are equal, we are set. */
9727 if ((TREE_CODE (base0) == ARRAY_REF
9728 && TREE_CODE (base1) == ARRAY_REF
9729 && (base_offset
9730 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9731 || (INDIRECT_REF_P (base0)
9732 && INDIRECT_REF_P (base1)
9733 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9734 TREE_OPERAND (base0, 0),
9735 TREE_OPERAND (base1, 0))))
9736 || operand_equal_p (base0, base1, 0))
9738 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9739 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9740 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9741 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9742 return fold_build2_loc (loc, PLUS_EXPR, type,
9743 base_offset,
9744 fold_build2_loc (loc, MULT_EXPR, type,
9745 diff, esz));
9747 return NULL_TREE;
9750 /* If the real or vector real constant CST of type TYPE has an exact
9751 inverse, return it, else return NULL. */
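/* For example, 4.0 has the exact inverse 0.25, whereas 3.0 has no exact
inverse in binary floating-point formats. */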
9753 static tree
9754 exact_inverse (tree type, tree cst)
9756 REAL_VALUE_TYPE r;
9757 tree unit_type, *elts;
9758 enum machine_mode mode;
9759 unsigned vec_nelts, i;
9761 switch (TREE_CODE (cst))
9763 case REAL_CST:
9764 r = TREE_REAL_CST (cst);
9766 if (exact_real_inverse (TYPE_MODE (type), &r))
9767 return build_real (type, r);
9769 return NULL_TREE;
9771 case VECTOR_CST:
9772 vec_nelts = VECTOR_CST_NELTS (cst);
9773 elts = XALLOCAVEC (tree, vec_nelts);
9774 unit_type = TREE_TYPE (type);
9775 mode = TYPE_MODE (unit_type);
9777 for (i = 0; i < vec_nelts; i++)
9779 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9780 if (!exact_real_inverse (mode, &r))
9781 return NULL_TREE;
9782 elts[i] = build_real (unit_type, r);
9785 return build_vector (type, elts);
9787 default:
9788 return NULL_TREE;
9792 /* Fold a binary expression of code CODE and type TYPE with operands
9793 OP0 and OP1. LOC is the location of the resulting expression.
9794 Return the folded expression if folding is successful. Otherwise,
9795 return NULL_TREE. */
9797 tree
9798 fold_binary_loc (location_t loc,
9799 enum tree_code code, tree type, tree op0, tree op1)
9801 enum tree_code_class kind = TREE_CODE_CLASS (code);
9802 tree arg0, arg1, tem;
9803 tree t1 = NULL_TREE;
9804 bool strict_overflow_p;
9806 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9807 && TREE_CODE_LENGTH (code) == 2
9808 && op0 != NULL_TREE
9809 && op1 != NULL_TREE);
9811 arg0 = op0;
9812 arg1 = op1;
9814 /* Strip any conversions that don't change the mode. This is
9815 safe for every expression, except for a comparison expression
9816 because its signedness is derived from its operands. So, in
9817 the latter case, only strip conversions that don't change the
9818 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9819 preserved.
9821 Note that this is done as an internal manipulation within the
9822 constant folder, in order to find the simplest representation
9823 of the arguments so that their form can be studied. In any
9824 case, the appropriate type conversions should be put back in
9825 the tree that will get out of the constant folder. */
9827 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9829 STRIP_SIGN_NOPS (arg0);
9830 STRIP_SIGN_NOPS (arg1);
9832 else
9834 STRIP_NOPS (arg0);
9835 STRIP_NOPS (arg1);
9838 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9839 constant but we can't do arithmetic on them. */
9840 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9841 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9842 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9843 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9844 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9845 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9847 if (kind == tcc_binary)
9849 /* Make sure type and arg0 have the same saturating flag. */
9850 gcc_assert (TYPE_SATURATING (type)
9851 == TYPE_SATURATING (TREE_TYPE (arg0)));
9852 tem = const_binop (code, arg0, arg1);
9854 else if (kind == tcc_comparison)
9855 tem = fold_relational_const (code, type, arg0, arg1);
9856 else
9857 tem = NULL_TREE;
9859 if (tem != NULL_TREE)
9861 if (TREE_TYPE (tem) != type)
9862 tem = fold_convert_loc (loc, type, tem);
9863 return tem;
9867 /* If this is a commutative operation, and ARG0 is a constant, move it
9868 to ARG1 to reduce the number of tests below. */
9869 if (commutative_tree_code (code)
9870 && tree_swap_operands_p (arg0, arg1, true))
9871 return fold_build2_loc (loc, code, type, op1, op0);
9873 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9875 First check for cases where an arithmetic operation is applied to a
9876 compound, conditional, or comparison operation. Push the arithmetic
9877 operation inside the compound or conditional to see if any folding
9878 can then be done. Convert comparison to conditional for this purpose.
9879 This also optimizes non-constant cases that used to be done in
9880 expand_expr.
9882 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9883 where one of the operands is a truth value (such as a comparison) and the
9884 other is a truth value or a BIT_AND_EXPR with the constant 1. In that case, the
9885 code below would make the expression more complex. Change it to a
9886 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9887 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
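/* For example, (a < b) & (c < d) becomes (a < b) && (c < d), and
(a < b) == (c < d) becomes the inversion of (a < b) ^ (c < d). */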
9889 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9890 || code == EQ_EXPR || code == NE_EXPR)
9891 && TREE_CODE (type) != VECTOR_TYPE
9892 && ((truth_value_p (TREE_CODE (arg0))
9893 && (truth_value_p (TREE_CODE (arg1))
9894 || (TREE_CODE (arg1) == BIT_AND_EXPR
9895 && integer_onep (TREE_OPERAND (arg1, 1)))))
9896 || (truth_value_p (TREE_CODE (arg1))
9897 && (truth_value_p (TREE_CODE (arg0))
9898 || (TREE_CODE (arg0) == BIT_AND_EXPR
9899 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9901 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9902 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9903 : TRUTH_XOR_EXPR,
9904 boolean_type_node,
9905 fold_convert_loc (loc, boolean_type_node, arg0),
9906 fold_convert_loc (loc, boolean_type_node, arg1));
9908 if (code == EQ_EXPR)
9909 tem = invert_truthvalue_loc (loc, tem);
9911 return fold_convert_loc (loc, type, tem);
9914 if (TREE_CODE_CLASS (code) == tcc_binary
9915 || TREE_CODE_CLASS (code) == tcc_comparison)
9917 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9919 tem = fold_build2_loc (loc, code, type,
9920 fold_convert_loc (loc, TREE_TYPE (op0),
9921 TREE_OPERAND (arg0, 1)), op1);
9922 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9923 tem);
9925 if (TREE_CODE (arg1) == COMPOUND_EXPR
9926 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9928 tem = fold_build2_loc (loc, code, type, op0,
9929 fold_convert_loc (loc, TREE_TYPE (op1),
9930 TREE_OPERAND (arg1, 1)));
9931 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9932 tem);
9935 if (TREE_CODE (arg0) == COND_EXPR
9936 || TREE_CODE (arg0) == VEC_COND_EXPR
9937 || COMPARISON_CLASS_P (arg0))
9939 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9940 arg0, arg1,
9941 /*cond_first_p=*/1);
9942 if (tem != NULL_TREE)
9943 return tem;
9946 if (TREE_CODE (arg1) == COND_EXPR
9947 || TREE_CODE (arg1) == VEC_COND_EXPR
9948 || COMPARISON_CLASS_P (arg1))
9950 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9951 arg1, arg0,
9952 /*cond_first_p=*/0);
9953 if (tem != NULL_TREE)
9954 return tem;
9958 switch (code)
9960 case MEM_REF:
9961 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9962 if (TREE_CODE (arg0) == ADDR_EXPR
9963 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9965 tree iref = TREE_OPERAND (arg0, 0);
9966 return fold_build2 (MEM_REF, type,
9967 TREE_OPERAND (iref, 0),
9968 int_const_binop (PLUS_EXPR, arg1,
9969 TREE_OPERAND (iref, 1)));
9972 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9973 if (TREE_CODE (arg0) == ADDR_EXPR
9974 && handled_component_p (TREE_OPERAND (arg0, 0)))
9976 tree base;
9977 HOST_WIDE_INT coffset;
9978 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9979 &coffset);
9980 if (!base)
9981 return NULL_TREE;
9982 return fold_build2 (MEM_REF, type,
9983 build_fold_addr_expr (base),
9984 int_const_binop (PLUS_EXPR, arg1,
9985 size_int (coffset)));
9988 return NULL_TREE;
9990 case POINTER_PLUS_EXPR:
9991 /* 0 +p index -> (type)index */
9992 if (integer_zerop (arg0))
9993 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9995 /* PTR +p 0 -> PTR */
9996 if (integer_zerop (arg1))
9997 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9999 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10000 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10001 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10002 return fold_convert_loc (loc, type,
10003 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10004 fold_convert_loc (loc, sizetype,
10005 arg1),
10006 fold_convert_loc (loc, sizetype,
10007 arg0)));
10009 /* (PTR +p B) +p A -> PTR +p (B + A) */
10010 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10012 tree inner;
10013 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10014 tree arg00 = TREE_OPERAND (arg0, 0);
10015 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10016 arg01, fold_convert_loc (loc, sizetype, arg1));
10017 return fold_convert_loc (loc, type,
10018 fold_build_pointer_plus_loc (loc,
10019 arg00, inner));
10022 /* PTR_CST +p CST -> CST1 */
10023 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10024 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10025 fold_convert_loc (loc, type, arg1));
10027 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
10028 of the array. Loop optimizer sometimes produce this type of
10029 expressions. */
10030 if (TREE_CODE (arg0) == ADDR_EXPR)
10032 tem = try_move_mult_to_index (loc, arg0,
10033 fold_convert_loc (loc,
10034 ssizetype, arg1));
10035 if (tem)
10036 return fold_convert_loc (loc, type, tem);
10039 return NULL_TREE;
10041 case PLUS_EXPR:
10042 /* A + (-B) -> A - B */
10043 if (TREE_CODE (arg1) == NEGATE_EXPR)
10044 return fold_build2_loc (loc, MINUS_EXPR, type,
10045 fold_convert_loc (loc, type, arg0),
10046 fold_convert_loc (loc, type,
10047 TREE_OPERAND (arg1, 0)));
10048 /* (-A) + B -> B - A */
10049 if (TREE_CODE (arg0) == NEGATE_EXPR
10050 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10051 return fold_build2_loc (loc, MINUS_EXPR, type,
10052 fold_convert_loc (loc, type, arg1),
10053 fold_convert_loc (loc, type,
10054 TREE_OPERAND (arg0, 0)));
10056 if (INTEGRAL_TYPE_P (type))
10058 /* Convert ~A + 1 to -A. */
10059 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10060 && integer_onep (arg1))
10061 return fold_build1_loc (loc, NEGATE_EXPR, type,
10062 fold_convert_loc (loc, type,
10063 TREE_OPERAND (arg0, 0)));
10065 /* ~X + X is -1. */
10066 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10067 && !TYPE_OVERFLOW_TRAPS (type))
10069 tree tem = TREE_OPERAND (arg0, 0);
10071 STRIP_NOPS (tem);
10072 if (operand_equal_p (tem, arg1, 0))
10074 t1 = build_int_cst_type (type, -1);
10075 return omit_one_operand_loc (loc, type, t1, arg1);
10079 /* X + ~X is -1. */
10080 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10081 && !TYPE_OVERFLOW_TRAPS (type))
10083 tree tem = TREE_OPERAND (arg1, 0);
10085 STRIP_NOPS (tem);
10086 if (operand_equal_p (arg0, tem, 0))
10088 t1 = build_int_cst_type (type, -1);
10089 return omit_one_operand_loc (loc, type, t1, arg0);
10093 /* X + (X / CST) * -CST is X % CST. */
10094 if (TREE_CODE (arg1) == MULT_EXPR
10095 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10096 && operand_equal_p (arg0,
10097 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10099 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10100 tree cst1 = TREE_OPERAND (arg1, 1);
10101 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10102 cst1, cst0);
10103 if (sum && integer_zerop (sum))
10104 return fold_convert_loc (loc, type,
10105 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10106 TREE_TYPE (arg0), arg0,
10107 cst0));
10111 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10112 one. Make sure the type is not saturating and has the signedness of
10113 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10114 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10115 if ((TREE_CODE (arg0) == MULT_EXPR
10116 || TREE_CODE (arg1) == MULT_EXPR)
10117 && !TYPE_SATURATING (type)
10118 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10119 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10120 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10122 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10123 if (tem)
10124 return tem;
10127 if (! FLOAT_TYPE_P (type))
10129 if (integer_zerop (arg1))
10130 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10132 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10133 with a constant, and the two constants have no bits in common,
10134 we should treat this as a BIT_IOR_EXPR since this may produce more
10135 simplifications. */
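/* For example, (X & 4) + (Y & 3) can be handled as (X & 4) | (Y & 3)
since the masks share no bits. */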
10136 if (TREE_CODE (arg0) == BIT_AND_EXPR
10137 && TREE_CODE (arg1) == BIT_AND_EXPR
10138 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10139 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10140 && integer_zerop (const_binop (BIT_AND_EXPR,
10141 TREE_OPERAND (arg0, 1),
10142 TREE_OPERAND (arg1, 1))))
10144 code = BIT_IOR_EXPR;
10145 goto bit_ior;
10148 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10149 (plus (plus (mult) (mult)) (foo)) so that we can
10150 take advantage of the factoring cases below. */
10151 if (TYPE_OVERFLOW_WRAPS (type)
10152 && (((TREE_CODE (arg0) == PLUS_EXPR
10153 || TREE_CODE (arg0) == MINUS_EXPR)
10154 && TREE_CODE (arg1) == MULT_EXPR)
10155 || ((TREE_CODE (arg1) == PLUS_EXPR
10156 || TREE_CODE (arg1) == MINUS_EXPR)
10157 && TREE_CODE (arg0) == MULT_EXPR)))
10159 tree parg0, parg1, parg, marg;
10160 enum tree_code pcode;
10162 if (TREE_CODE (arg1) == MULT_EXPR)
10163 parg = arg0, marg = arg1;
10164 else
10165 parg = arg1, marg = arg0;
10166 pcode = TREE_CODE (parg);
10167 parg0 = TREE_OPERAND (parg, 0);
10168 parg1 = TREE_OPERAND (parg, 1);
10169 STRIP_NOPS (parg0);
10170 STRIP_NOPS (parg1);
10172 if (TREE_CODE (parg0) == MULT_EXPR
10173 && TREE_CODE (parg1) != MULT_EXPR)
10174 return fold_build2_loc (loc, pcode, type,
10175 fold_build2_loc (loc, PLUS_EXPR, type,
10176 fold_convert_loc (loc, type,
10177 parg0),
10178 fold_convert_loc (loc, type,
10179 marg)),
10180 fold_convert_loc (loc, type, parg1));
10181 if (TREE_CODE (parg0) != MULT_EXPR
10182 && TREE_CODE (parg1) == MULT_EXPR)
10183 return
10184 fold_build2_loc (loc, PLUS_EXPR, type,
10185 fold_convert_loc (loc, type, parg0),
10186 fold_build2_loc (loc, pcode, type,
10187 fold_convert_loc (loc, type, marg),
10188 fold_convert_loc (loc, type,
10189 parg1)));
10192 else
10194 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10195 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10196 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10198 /* Likewise if the operands are reversed. */
10199 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10200 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10202 /* Convert X + -C into X - C. */
10203 if (TREE_CODE (arg1) == REAL_CST
10204 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10206 tem = fold_negate_const (arg1, type);
10207 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10208 return fold_build2_loc (loc, MINUS_EXPR, type,
10209 fold_convert_loc (loc, type, arg0),
10210 fold_convert_loc (loc, type, tem));
10213 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10214 to __complex__ ( x, y ). This is not the same for SNaNs or
10215 if signed zeros are involved. */
10216 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10217 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10218 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10220 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10221 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10222 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10223 bool arg0rz = false, arg0iz = false;
10224 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10225 || (arg0i && (arg0iz = real_zerop (arg0i))))
10227 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10228 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10229 if (arg0rz && arg1i && real_zerop (arg1i))
10231 tree rp = arg1r ? arg1r
10232 : build1 (REALPART_EXPR, rtype, arg1);
10233 tree ip = arg0i ? arg0i
10234 : build1 (IMAGPART_EXPR, rtype, arg0);
10235 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10237 else if (arg0iz && arg1r && real_zerop (arg1r))
10239 tree rp = arg0r ? arg0r
10240 : build1 (REALPART_EXPR, rtype, arg0);
10241 tree ip = arg1i ? arg1i
10242 : build1 (IMAGPART_EXPR, rtype, arg1);
10243 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10248 if (flag_unsafe_math_optimizations
10249 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10250 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10251 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10252 return tem;
10254 /* Convert x+x into x*2.0. */
10255 if (operand_equal_p (arg0, arg1, 0)
10256 && SCALAR_FLOAT_TYPE_P (type))
10257 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10258 build_real (type, dconst2));
10260 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10261 We associate floats only if the user has specified
10262 -fassociative-math. */
10263 if (flag_associative_math
10264 && TREE_CODE (arg1) == PLUS_EXPR
10265 && TREE_CODE (arg0) != MULT_EXPR)
10267 tree tree10 = TREE_OPERAND (arg1, 0);
10268 tree tree11 = TREE_OPERAND (arg1, 1);
10269 if (TREE_CODE (tree11) == MULT_EXPR
10270 && TREE_CODE (tree10) == MULT_EXPR)
10272 tree tree0;
10273 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10274 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10277 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10278 We associate floats only if the user has specified
10279 -fassociative-math. */
10280 if (flag_associative_math
10281 && TREE_CODE (arg0) == PLUS_EXPR
10282 && TREE_CODE (arg1) != MULT_EXPR)
10284 tree tree00 = TREE_OPERAND (arg0, 0);
10285 tree tree01 = TREE_OPERAND (arg0, 1);
10286 if (TREE_CODE (tree01) == MULT_EXPR
10287 && TREE_CODE (tree00) == MULT_EXPR)
10289 tree tree0;
10290 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10291 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10296 bit_rotate:
10297 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10298 is a rotate of A by C1 bits. */
10299 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10300 is a rotate of A by B bits. */
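/* For example, for a 32-bit unsigned A, (A << 3) + (A >> 29) folds to
A rotated left by 3 bits. */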
10302 enum tree_code code0, code1;
10303 tree rtype;
10304 code0 = TREE_CODE (arg0);
10305 code1 = TREE_CODE (arg1);
10306 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10307 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10308 && operand_equal_p (TREE_OPERAND (arg0, 0),
10309 TREE_OPERAND (arg1, 0), 0)
10310 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10311 TYPE_UNSIGNED (rtype))
10312 /* Only create rotates in complete modes. Other cases are not
10313 expanded properly. */
10314 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10316 tree tree01, tree11;
10317 enum tree_code code01, code11;
10319 tree01 = TREE_OPERAND (arg0, 1);
10320 tree11 = TREE_OPERAND (arg1, 1);
10321 STRIP_NOPS (tree01);
10322 STRIP_NOPS (tree11);
10323 code01 = TREE_CODE (tree01);
10324 code11 = TREE_CODE (tree11);
10325 if (code01 == INTEGER_CST
10326 && code11 == INTEGER_CST
10327 && TREE_INT_CST_HIGH (tree01) == 0
10328 && TREE_INT_CST_HIGH (tree11) == 0
10329 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10330 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10332 tem = build2_loc (loc, LROTATE_EXPR,
10333 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10334 TREE_OPERAND (arg0, 0),
10335 code0 == LSHIFT_EXPR ? tree01 : tree11);
10336 return fold_convert_loc (loc, type, tem);
10338 else if (code11 == MINUS_EXPR)
10340 tree tree110, tree111;
10341 tree110 = TREE_OPERAND (tree11, 0);
10342 tree111 = TREE_OPERAND (tree11, 1);
10343 STRIP_NOPS (tree110);
10344 STRIP_NOPS (tree111);
10345 if (TREE_CODE (tree110) == INTEGER_CST
10346 && 0 == compare_tree_int (tree110,
10347 TYPE_PRECISION
10348 (TREE_TYPE (TREE_OPERAND
10349 (arg0, 0))))
10350 && operand_equal_p (tree01, tree111, 0))
10351 return
10352 fold_convert_loc (loc, type,
10353 build2 ((code0 == LSHIFT_EXPR
10354 ? LROTATE_EXPR
10355 : RROTATE_EXPR),
10356 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10357 TREE_OPERAND (arg0, 0), tree01));
10359 else if (code01 == MINUS_EXPR)
10361 tree tree010, tree011;
10362 tree010 = TREE_OPERAND (tree01, 0);
10363 tree011 = TREE_OPERAND (tree01, 1);
10364 STRIP_NOPS (tree010);
10365 STRIP_NOPS (tree011);
10366 if (TREE_CODE (tree010) == INTEGER_CST
10367 && 0 == compare_tree_int (tree010,
10368 TYPE_PRECISION
10369 (TREE_TYPE (TREE_OPERAND
10370 (arg0, 0))))
10371 && operand_equal_p (tree11, tree011, 0))
10372 return fold_convert_loc
10373 (loc, type,
10374 build2 ((code0 != LSHIFT_EXPR
10375 ? LROTATE_EXPR
10376 : RROTATE_EXPR),
10377 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10378 TREE_OPERAND (arg0, 0), tree11));
10383 associate:
10384 /* In most languages, we can't associate operations on floats through
10385 parentheses. Rather than remember where the parentheses were, we
10386 don't associate floats at all, unless the user has specified
10387 -fassociative-math.
10388 And, we need to make sure the type is not saturating. */
10390 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10391 && !TYPE_SATURATING (type))
10393 tree var0, con0, lit0, minus_lit0;
10394 tree var1, con1, lit1, minus_lit1;
10395 tree atype = type;
10396 bool ok = true;
10398 /* Split both trees into variables, constants, and literals. Then
10399 associate each group together, the constants with literals,
10400 then the result with variables. This increases the chances of
10401 literals being recombined later and of generating relocatable
10402 expressions for the sum of a constant and literal. */
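/* For example, with unsigned x and y, (x + 1) + (y + 2) is reassociated
here to (x + y) + 3. */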
10403 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10404 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10405 code == MINUS_EXPR);
10407 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10408 if (code == MINUS_EXPR)
10409 code = PLUS_EXPR;
10411 /* With undefined overflow prefer doing association in a type
10412 which wraps on overflow, if that is one of the operand types. */
10413 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10414 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10416 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10417 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10418 atype = TREE_TYPE (arg0);
10419 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10420 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10421 atype = TREE_TYPE (arg1);
10422 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10425 /* With undefined overflow we can only associate constants with one
10426 variable, and constants whose association doesn't overflow. */
10427 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10428 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10430 if (var0 && var1)
10432 tree tmp0 = var0;
10433 tree tmp1 = var1;
10435 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10436 tmp0 = TREE_OPERAND (tmp0, 0);
10437 if (CONVERT_EXPR_P (tmp0)
10438 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10439 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10440 <= TYPE_PRECISION (atype)))
10441 tmp0 = TREE_OPERAND (tmp0, 0);
10442 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10443 tmp1 = TREE_OPERAND (tmp1, 0);
10444 if (CONVERT_EXPR_P (tmp1)
10445 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10446 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10447 <= TYPE_PRECISION (atype)))
10448 tmp1 = TREE_OPERAND (tmp1, 0);
10449 /* The only case we can still associate with two variables
10450 is if they are the same, modulo negation and bit-pattern
10451 preserving conversions. */
10452 if (!operand_equal_p (tmp0, tmp1, 0))
10453 ok = false;
10457 /* Only do something if we found more than two objects. Otherwise,
10458 nothing has changed and we risk infinite recursion. */
10459 if (ok
10460 && (2 < ((var0 != 0) + (var1 != 0)
10461 + (con0 != 0) + (con1 != 0)
10462 + (lit0 != 0) + (lit1 != 0)
10463 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10465 bool any_overflows = false;
10466 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10467 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10468 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10469 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10470 var0 = associate_trees (loc, var0, var1, code, atype);
10471 con0 = associate_trees (loc, con0, con1, code, atype);
10472 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10473 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10474 code, atype);
10476 /* Preserve the MINUS_EXPR if the negative part of the literal is
10477 greater than the positive part. Otherwise, the multiplicative
10478 folding code (i.e. extract_muldiv) may be fooled in case
10479 unsigned constants are subtracted, like in the following
10480 example: ((X*2 + 4) - 8U)/2. */
10481 if (minus_lit0 && lit0)
10483 if (TREE_CODE (lit0) == INTEGER_CST
10484 && TREE_CODE (minus_lit0) == INTEGER_CST
10485 && tree_int_cst_lt (lit0, minus_lit0))
10487 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10488 MINUS_EXPR, atype);
10489 lit0 = 0;
10491 else
10493 lit0 = associate_trees (loc, lit0, minus_lit0,
10494 MINUS_EXPR, atype);
10495 minus_lit0 = 0;
10499 /* Don't introduce overflows through reassociation. */
10500 if (!any_overflows
10501 && ((lit0 && TREE_OVERFLOW (lit0))
10502 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10503 return NULL_TREE;
10505 if (minus_lit0)
10507 if (con0 == 0)
10508 return
10509 fold_convert_loc (loc, type,
10510 associate_trees (loc, var0, minus_lit0,
10511 MINUS_EXPR, atype));
10512 else
10514 con0 = associate_trees (loc, con0, minus_lit0,
10515 MINUS_EXPR, atype);
10516 return
10517 fold_convert_loc (loc, type,
10518 associate_trees (loc, var0, con0,
10519 PLUS_EXPR, atype));
10523 con0 = associate_trees (loc, con0, lit0, code, atype);
10524 return
10525 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10526 code, atype));
10530 return NULL_TREE;
10532 case MINUS_EXPR:
10533 /* Pointer simplifications for subtraction, simple reassociations. */
10534 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10536 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10537 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10538 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10540 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10541 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10542 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10543 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10544 return fold_build2_loc (loc, PLUS_EXPR, type,
10545 fold_build2_loc (loc, MINUS_EXPR, type,
10546 arg00, arg10),
10547 fold_build2_loc (loc, MINUS_EXPR, type,
10548 arg01, arg11));
10550 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10551 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10553 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10554 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10555 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10556 fold_convert_loc (loc, type, arg1));
10557 if (tmp)
10558 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10561 /* A - (-B) -> A + B */
10562 if (TREE_CODE (arg1) == NEGATE_EXPR)
10563 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10564 fold_convert_loc (loc, type,
10565 TREE_OPERAND (arg1, 0)));
10566 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10567 if (TREE_CODE (arg0) == NEGATE_EXPR
10568 && (FLOAT_TYPE_P (type)
10569 || INTEGRAL_TYPE_P (type))
10570 && negate_expr_p (arg1)
10571 && reorder_operands_p (arg0, arg1))
10572 return fold_build2_loc (loc, MINUS_EXPR, type,
10573 fold_convert_loc (loc, type,
10574 negate_expr (arg1)),
10575 fold_convert_loc (loc, type,
10576 TREE_OPERAND (arg0, 0)));
10577 /* Convert -A - 1 to ~A. */
10578 if (INTEGRAL_TYPE_P (type)
10579 && TREE_CODE (arg0) == NEGATE_EXPR
10580 && integer_onep (arg1)
10581 && !TYPE_OVERFLOW_TRAPS (type))
10582 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10583 fold_convert_loc (loc, type,
10584 TREE_OPERAND (arg0, 0)));
10586 /* Convert -1 - A to ~A. */
10587 if (INTEGRAL_TYPE_P (type)
10588 && integer_all_onesp (arg0))
10589 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10592 /* X - (X / CST) * CST is X % CST. */
10593 if (INTEGRAL_TYPE_P (type)
10594 && TREE_CODE (arg1) == MULT_EXPR
10595 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10596 && operand_equal_p (arg0,
10597 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10598 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10599 TREE_OPERAND (arg1, 1), 0))
10600 return
10601 fold_convert_loc (loc, type,
10602 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10603 arg0, TREE_OPERAND (arg1, 1)));
10605 if (! FLOAT_TYPE_P (type))
10607 if (integer_zerop (arg0))
10608 return negate_expr (fold_convert_loc (loc, type, arg1));
10609 if (integer_zerop (arg1))
10610 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10612 /* Fold A - (A & B) into ~B & A. */
10613 if (!TREE_SIDE_EFFECTS (arg0)
10614 && TREE_CODE (arg1) == BIT_AND_EXPR)
10616 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10618 tree arg10 = fold_convert_loc (loc, type,
10619 TREE_OPERAND (arg1, 0));
10620 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10621 fold_build1_loc (loc, BIT_NOT_EXPR,
10622 type, arg10),
10623 fold_convert_loc (loc, type, arg0));
10625 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10627 tree arg11 = fold_convert_loc (loc,
10628 type, TREE_OPERAND (arg1, 1));
10629 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10630 fold_build1_loc (loc, BIT_NOT_EXPR,
10631 type, arg11),
10632 fold_convert_loc (loc, type, arg0));
10636 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10637 any power of 2 minus 1. */
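/* For example, (A & ~3) - (A & 3) becomes (A ^ 3) - 3. */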
10638 if (TREE_CODE (arg0) == BIT_AND_EXPR
10639 && TREE_CODE (arg1) == BIT_AND_EXPR
10640 && operand_equal_p (TREE_OPERAND (arg0, 0),
10641 TREE_OPERAND (arg1, 0), 0))
10643 tree mask0 = TREE_OPERAND (arg0, 1);
10644 tree mask1 = TREE_OPERAND (arg1, 1);
10645 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10647 if (operand_equal_p (tem, mask1, 0))
10649 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10650 TREE_OPERAND (arg0, 0), mask1);
10651 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10656 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10657 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10658 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10660 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10661 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10662 (-ARG1 + ARG0) reduces to -ARG1. */
10663 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10664 return negate_expr (fold_convert_loc (loc, type, arg1));
10666 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10667 __complex__ ( x, -y ). This is not the same for SNaNs or if
10668 signed zeros are involved. */
10669 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10670 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10671 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10673 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10674 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10675 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10676 bool arg0rz = false, arg0iz = false;
10677 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10678 || (arg0i && (arg0iz = real_zerop (arg0i))))
10680 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10681 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10682 if (arg0rz && arg1i && real_zerop (arg1i))
10684 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10685 arg1r ? arg1r
10686 : build1 (REALPART_EXPR, rtype, arg1));
10687 tree ip = arg0i ? arg0i
10688 : build1 (IMAGPART_EXPR, rtype, arg0);
10689 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10691 else if (arg0iz && arg1r && real_zerop (arg1r))
10693 tree rp = arg0r ? arg0r
10694 : build1 (REALPART_EXPR, rtype, arg0);
10695 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10696 arg1i ? arg1i
10697 : build1 (IMAGPART_EXPR, rtype, arg1));
10698 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10703 /* Fold &x - &x. This can happen from &x.foo - &x.
10704 This is unsafe for certain floats even in non-IEEE formats.
10705 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10706 Also note that operand_equal_p is always false if an operand
10707 is volatile. */
10709 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10710 && operand_equal_p (arg0, arg1, 0))
10711 return build_zero_cst (type);
10713 /* A - B -> A + (-B) if B is easily negatable. */
10714 if (negate_expr_p (arg1)
10715 && ((FLOAT_TYPE_P (type)
10716 /* Avoid this transformation if B is a positive REAL_CST. */
10717 && (TREE_CODE (arg1) != REAL_CST
10718 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10719 || INTEGRAL_TYPE_P (type)))
10720 return fold_build2_loc (loc, PLUS_EXPR, type,
10721 fold_convert_loc (loc, type, arg0),
10722 fold_convert_loc (loc, type,
10723 negate_expr (arg1)));
10725 /* Try folding difference of addresses. */
10727 HOST_WIDE_INT diff;
10729 if ((TREE_CODE (arg0) == ADDR_EXPR
10730 || TREE_CODE (arg1) == ADDR_EXPR)
10731 && ptr_difference_const (arg0, arg1, &diff))
10732 return build_int_cst_type (type, diff);
10735 /* Fold &a[i] - &a[j] to i-j. */
10736 if (TREE_CODE (arg0) == ADDR_EXPR
10737 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10738 && TREE_CODE (arg1) == ADDR_EXPR
10739 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10741 tree tem = fold_addr_of_array_ref_difference (loc, type,
10742 TREE_OPERAND (arg0, 0),
10743 TREE_OPERAND (arg1, 0));
10744 if (tem)
10745 return tem;
10748 if (FLOAT_TYPE_P (type)
10749 && flag_unsafe_math_optimizations
10750 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10751 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10752 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10753 return tem;
10755 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10756 one. Make sure the type is not saturating and has the signedness of
10757 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10758 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10759 if ((TREE_CODE (arg0) == MULT_EXPR
10760 || TREE_CODE (arg1) == MULT_EXPR)
10761 && !TYPE_SATURATING (type)
10762 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10763 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10764 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10766 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10767 if (tem)
10768 return tem;
10771 goto associate;
10773 case MULT_EXPR:
10774 /* (-A) * (-B) -> A * B */
10775 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10776 return fold_build2_loc (loc, MULT_EXPR, type,
10777 fold_convert_loc (loc, type,
10778 TREE_OPERAND (arg0, 0)),
10779 fold_convert_loc (loc, type,
10780 negate_expr (arg1)));
10781 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10782 return fold_build2_loc (loc, MULT_EXPR, type,
10783 fold_convert_loc (loc, type,
10784 negate_expr (arg0)),
10785 fold_convert_loc (loc, type,
10786 TREE_OPERAND (arg1, 0)));
10788 if (! FLOAT_TYPE_P (type))
10790 if (integer_zerop (arg1))
10791 return omit_one_operand_loc (loc, type, arg1, arg0);
10792 if (integer_onep (arg1))
10793 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10794 /* Transform x * -1 into -x. Make sure to do the negation
10795 on the original operand with conversions not stripped
10796 because we can only strip non-sign-changing conversions. */
10797 if (integer_all_onesp (arg1))
10798 return fold_convert_loc (loc, type, negate_expr (op0));
10799 /* Transform x * -C into -x * C if x is easily negatable. */
10800 if (TREE_CODE (arg1) == INTEGER_CST
10801 && tree_int_cst_sgn (arg1) == -1
10802 && negate_expr_p (arg0)
10803 && (tem = negate_expr (arg1)) != arg1
10804 && !TREE_OVERFLOW (tem))
10805 return fold_build2_loc (loc, MULT_EXPR, type,
10806 fold_convert_loc (loc, type,
10807 negate_expr (arg0)),
10808 tem);
10810 /* (a * (1 << b)) is (a << b) */
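       /* E.g. a = 3, b = 4: 3 * (1 << 4) == 3 * 16 == 48 == 3 << 4.  */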
10811 if (TREE_CODE (arg1) == LSHIFT_EXPR
10812 && integer_onep (TREE_OPERAND (arg1, 0)))
10813 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10814 TREE_OPERAND (arg1, 1));
10815 if (TREE_CODE (arg0) == LSHIFT_EXPR
10816 && integer_onep (TREE_OPERAND (arg0, 0)))
10817 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10818 TREE_OPERAND (arg0, 1));
10820 /* (A + A) * C -> A * 2 * C */
10821 if (TREE_CODE (arg0) == PLUS_EXPR
10822 && TREE_CODE (arg1) == INTEGER_CST
10823 && operand_equal_p (TREE_OPERAND (arg0, 0),
10824 TREE_OPERAND (arg0, 1), 0))
10825 return fold_build2_loc (loc, MULT_EXPR, type,
10826 omit_one_operand_loc (loc, type,
10827 TREE_OPERAND (arg0, 0),
10828 TREE_OPERAND (arg0, 1)),
10829 fold_build2_loc (loc, MULT_EXPR, type,
10830 build_int_cst (type, 2) , arg1));
10832 strict_overflow_p = false;
10833 if (TREE_CODE (arg1) == INTEGER_CST
10834 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10835 &strict_overflow_p)))
10837 if (strict_overflow_p)
10838 fold_overflow_warning (("assuming signed overflow does not "
10839 "occur when simplifying "
10840 "multiplication"),
10841 WARN_STRICT_OVERFLOW_MISC);
10842 return fold_convert_loc (loc, type, tem);
10845 /* Optimize z * conj(z) for integer complex numbers. */
10846 if (TREE_CODE (arg0) == CONJ_EXPR
10847 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10848 return fold_mult_zconjz (loc, type, arg1);
10849 if (TREE_CODE (arg1) == CONJ_EXPR
10850 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10851 return fold_mult_zconjz (loc, type, arg0);
10853 else
10855 /* Maybe fold x * 0 to 0. The expressions aren't the same
10856 when x is NaN, since x * 0 is also NaN. Nor are they the
10857 same in modes with signed zeros, since multiplying a
10858 negative value by 0 gives -0, not +0. */
10859 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10860 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10861 && real_zerop (arg1))
10862 return omit_one_operand_loc (loc, type, arg1, arg0);
10863 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10864 Likewise for complex arithmetic with signed zeros. */
10865 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10866 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10867 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10868 && real_onep (arg1))
10869 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10871 /* Transform x * -1.0 into -x. */
10872 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10873 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10874 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10875 && real_minus_onep (arg1))
10876 return fold_convert_loc (loc, type, negate_expr (arg0));
10878 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10879 the result for floating-point types due to rounding, so it is applied
10880 only if -fassociative-math was specified. */
10881 if (flag_associative_math
10882 && TREE_CODE (arg0) == RDIV_EXPR
10883 && TREE_CODE (arg1) == REAL_CST
10884 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10886 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10887 arg1);
10888 if (tem)
10889 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10890 TREE_OPERAND (arg0, 1));
10893 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10894 if (operand_equal_p (arg0, arg1, 0))
10896 tree tem = fold_strip_sign_ops (arg0);
10897 if (tem != NULL_TREE)
10899 tem = fold_convert_loc (loc, type, tem);
10900 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10904 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10905 This is not the same for NaNs or if signed zeros are
10906 involved. */
10907 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10908 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10909 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10910 && TREE_CODE (arg1) == COMPLEX_CST
10911 && real_zerop (TREE_REALPART (arg1)))
10913 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10914 if (real_onep (TREE_IMAGPART (arg1)))
10915 return
10916 fold_build2_loc (loc, COMPLEX_EXPR, type,
10917 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10918 rtype, arg0)),
10919 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10920 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10921 return
10922 fold_build2_loc (loc, COMPLEX_EXPR, type,
10923 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10924 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10925 rtype, arg0)));
10928 /* Optimize z * conj(z) for floating point complex numbers.
10929 Guarded by flag_unsafe_math_optimizations as non-finite
10930 imaginary components don't produce scalar results. */
10931 if (flag_unsafe_math_optimizations
10932 && TREE_CODE (arg0) == CONJ_EXPR
10933 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10934 return fold_mult_zconjz (loc, type, arg1);
10935 if (flag_unsafe_math_optimizations
10936 && TREE_CODE (arg1) == CONJ_EXPR
10937 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10938 return fold_mult_zconjz (loc, type, arg0);
10940 if (flag_unsafe_math_optimizations)
10942 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10943 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10945 /* Optimizations of root(...)*root(...). */
10946 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10948 tree rootfn, arg;
10949 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10950 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10952 /* Optimize sqrt(x)*sqrt(x) as x. */
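       /* This is only an identity for x >= 0; sqrt(-1.0) * sqrt(-1.0) is
          NaN, not -1.0, hence the flag_unsafe_math_optimizations guard
          around this whole group of root folds.  */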
10953 if (BUILTIN_SQRT_P (fcode0)
10954 && operand_equal_p (arg00, arg10, 0)
10955 && ! HONOR_SNANS (TYPE_MODE (type)))
10956 return arg00;
10958 /* Optimize root(x)*root(y) as root(x*y). */
10959 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10960 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10961 return build_call_expr_loc (loc, rootfn, 1, arg);
10964 /* Optimize expN(x)*expN(y) as expN(x+y). */
10965 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10967 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10968 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10969 CALL_EXPR_ARG (arg0, 0),
10970 CALL_EXPR_ARG (arg1, 0));
10971 return build_call_expr_loc (loc, expfn, 1, arg);
10974 /* Optimizations of pow(...)*pow(...). */
10975 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10976 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10977 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10979 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10980 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10981 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10982 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10984 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10985 if (operand_equal_p (arg01, arg11, 0))
10987 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10988 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10989 arg00, arg10);
10990 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10993 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10994 if (operand_equal_p (arg00, arg10, 0))
10996 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10997 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10998 arg01, arg11);
10999 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11003 /* Optimize tan(x)*cos(x) as sin(x). */
11004 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11005 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11006 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11007 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11008 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11009 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11010 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11011 CALL_EXPR_ARG (arg1, 0), 0))
11013 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11015 if (sinfn != NULL_TREE)
11016 return build_call_expr_loc (loc, sinfn, 1,
11017 CALL_EXPR_ARG (arg0, 0));
11020 /* Optimize x*pow(x,c) as pow(x,c+1). */
11021 if (fcode1 == BUILT_IN_POW
11022 || fcode1 == BUILT_IN_POWF
11023 || fcode1 == BUILT_IN_POWL)
11025 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11026 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11027 if (TREE_CODE (arg11) == REAL_CST
11028 && !TREE_OVERFLOW (arg11)
11029 && operand_equal_p (arg0, arg10, 0))
11031 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11032 REAL_VALUE_TYPE c;
11033 tree arg;
11035 c = TREE_REAL_CST (arg11);
11036 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11037 arg = build_real (type, c);
11038 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11042 /* Optimize pow(x,c)*x as pow(x,c+1). */
11043 if (fcode0 == BUILT_IN_POW
11044 || fcode0 == BUILT_IN_POWF
11045 || fcode0 == BUILT_IN_POWL)
11047 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11048 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11049 if (TREE_CODE (arg01) == REAL_CST
11050 && !TREE_OVERFLOW (arg01)
11051 && operand_equal_p (arg1, arg00, 0))
11053 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11054 REAL_VALUE_TYPE c;
11055 tree arg;
11057 c = TREE_REAL_CST (arg01);
11058 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11059 arg = build_real (type, c);
11060 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11064 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11065 if (!in_gimple_form
11066 && optimize
11067 && operand_equal_p (arg0, arg1, 0))
11069 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11071 if (powfn)
11073 tree arg = build_real (type, dconst2);
11074 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11079 goto associate;
11081 case BIT_IOR_EXPR:
11082 bit_ior:
11083 if (integer_all_onesp (arg1))
11084 return omit_one_operand_loc (loc, type, arg1, arg0);
11085 if (integer_zerop (arg1))
11086 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11087 if (operand_equal_p (arg0, arg1, 0))
11088 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11090 /* ~X | X is -1. */
11091 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11092 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11094 t1 = build_zero_cst (type);
11095 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11096 return omit_one_operand_loc (loc, type, t1, arg1);
11099 /* X | ~X is -1. */
11100 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11101 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11103 t1 = build_zero_cst (type);
11104 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11105 return omit_one_operand_loc (loc, type, t1, arg0);
11108 /* Canonicalize (X & C1) | C2. */
11109 if (TREE_CODE (arg0) == BIT_AND_EXPR
11110 && TREE_CODE (arg1) == INTEGER_CST
11111 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11113 double_int c1, c2, c3, msk;
11114 int width = TYPE_PRECISION (type), w;
11115 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11116 c2 = tree_to_double_int (arg1);
11118 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
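       /* E.g. C1 = 0x0f, C2 = 0xff: every bit X could contribute through C1
          is forced to 1 by C2 anyway, so (X & 0x0f) | 0xff == 0xff.
          omit_one_operand keeps X around only for its side effects, if any.  */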
11119 if ((c1 & c2) == c1)
11120 return omit_one_operand_loc (loc, type, arg1,
11121 TREE_OPERAND (arg0, 0));
11123 msk = double_int::mask (width);
11125 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11126 if (msk.and_not (c1 | c2).is_zero ())
11127 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11128 TREE_OPERAND (arg0, 0), arg1);
11130 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11131 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11132 mode which allows further optimizations. */
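       /* E.g. C1 = 0x3f, C2 = 0x0f: C1 & ~C2 == 0x30, so (X & 0x3f) | 0x0f
          becomes (X & 0x30) | 0x0f.  The loop below instead prefers a full
          byte/halfword/word mask for C3 when C1 | C2 covers one completely
          and C1 fits inside it, since such "mode masks" allow further
          optimizations.  */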
11133 c1 &= msk;
11134 c2 &= msk;
11135 c3 = c1.and_not (c2);
11136 for (w = BITS_PER_UNIT;
11137 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11138 w <<= 1)
11140 unsigned HOST_WIDE_INT mask
11141 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11142 if (((c1.low | c2.low) & mask) == mask
11143 && (c1.low & ~mask) == 0 && c1.high == 0)
11145 c3 = double_int::from_uhwi (mask);
11146 break;
11149 if (c3 != c1)
11150 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11151 fold_build2_loc (loc, BIT_AND_EXPR, type,
11152 TREE_OPERAND (arg0, 0),
11153 double_int_to_tree (type,
11154 c3)),
11155 arg1);
11158 /* (X & Y) | Y is (X, Y). */
11159 if (TREE_CODE (arg0) == BIT_AND_EXPR
11160 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11161 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11162 /* (X & Y) | X is (Y, X). */
11163 if (TREE_CODE (arg0) == BIT_AND_EXPR
11164 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11165 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11166 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11167 /* X | (X & Y) is (Y, X). */
11168 if (TREE_CODE (arg1) == BIT_AND_EXPR
11169 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11170 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11171 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11172 /* X | (Y & X) is (Y, X). */
11173 if (TREE_CODE (arg1) == BIT_AND_EXPR
11174 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11175 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11176 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11178 /* (X & ~Y) | (~X & Y) is X ^ Y */
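       /* This is the textbook sum-of-products form of XOR: the result bit is
          1 exactly when the two input bits differ.  E.g. X = 0b1100,
          Y = 0b1010: (X & ~Y) | (~X & Y) == 0b0100 | 0b0010 == 0b0110
          == X ^ Y.  */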
11179 if (TREE_CODE (arg0) == BIT_AND_EXPR
11180 && TREE_CODE (arg1) == BIT_AND_EXPR)
11182 tree a0, a1, l0, l1, n0, n1;
11184 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11185 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11187 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11188 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11190 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11191 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11193 if ((operand_equal_p (n0, a0, 0)
11194 && operand_equal_p (n1, a1, 0))
11195 || (operand_equal_p (n0, a1, 0)
11196 && operand_equal_p (n1, a0, 0)))
11197 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11200 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11201 if (t1 != NULL_TREE)
11202 return t1;
11204 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11206 This results in more efficient code for machines without a NAND
11207 instruction. Combine will canonicalize to the first form
11208 which will allow use of NAND instructions provided by the
11209 backend if they exist. */
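       /* I.e. De Morgan's law: ~A | ~B == ~(A & B).  */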
11210 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11211 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11213 return
11214 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11215 build2 (BIT_AND_EXPR, type,
11216 fold_convert_loc (loc, type,
11217 TREE_OPERAND (arg0, 0)),
11218 fold_convert_loc (loc, type,
11219 TREE_OPERAND (arg1, 0))));
11222 /* See if this can be simplified into a rotate first. If that
11223 is unsuccessful continue in the association code. */
11224 goto bit_rotate;
11226 case BIT_XOR_EXPR:
11227 if (integer_zerop (arg1))
11228 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11229 if (integer_all_onesp (arg1))
11230 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11231 if (operand_equal_p (arg0, arg1, 0))
11232 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11234 /* ~X ^ X is -1. */
11235 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11236 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11238 t1 = build_zero_cst (type);
11239 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11240 return omit_one_operand_loc (loc, type, t1, arg1);
11243 /* X ^ ~X is -1. */
11244 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11245 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11247 t1 = build_zero_cst (type);
11248 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11249 return omit_one_operand_loc (loc, type, t1, arg0);
11252 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11253 with a constant, and the two constants have no bits in common,
11254 we should treat this as a BIT_IOR_EXPR since this may produce more
11255 simplifications. */
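       /* When the two masked values can never have a 1 bit in the same
          position, XOR and IOR agree, e.g.
          (X & 0x0f) ^ (Y & 0xf0) == (X & 0x0f) | (Y & 0xf0).
          Treating it as BIT_IOR_EXPR lets the IOR folds above apply.  */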
11256 if (TREE_CODE (arg0) == BIT_AND_EXPR
11257 && TREE_CODE (arg1) == BIT_AND_EXPR
11258 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11259 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11260 && integer_zerop (const_binop (BIT_AND_EXPR,
11261 TREE_OPERAND (arg0, 1),
11262 TREE_OPERAND (arg1, 1))))
11264 code = BIT_IOR_EXPR;
11265 goto bit_ior;
11268 /* (X | Y) ^ X -> Y & ~X. */
11269 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11270 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11272 tree t2 = TREE_OPERAND (arg0, 1);
11273 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11274 arg1);
11275 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11276 fold_convert_loc (loc, type, t2),
11277 fold_convert_loc (loc, type, t1));
11278 return t1;
11281 /* (Y | X) ^ X -> Y & ~X. */
11282 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11283 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11285 tree t2 = TREE_OPERAND (arg0, 0);
11286 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11287 arg1);
11288 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11289 fold_convert_loc (loc, type, t2),
11290 fold_convert_loc (loc, type, t1));
11291 return t1;
11294 /* X ^ (X | Y) -> Y & ~X. */
11295 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11296 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11298 tree t2 = TREE_OPERAND (arg1, 1);
11299 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11300 arg0);
11301 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11302 fold_convert_loc (loc, type, t2),
11303 fold_convert_loc (loc, type, t1));
11304 return t1;
11307 /* X ^ (Y | X) -> Y & ~X. */
11308 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11309 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11311 tree t2 = TREE_OPERAND (arg1, 0);
11312 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11313 arg0);
11314 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11315 fold_convert_loc (loc, type, t2),
11316 fold_convert_loc (loc, type, t1));
11317 return t1;
11320 /* Convert ~X ^ ~Y to X ^ Y. */
11321 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11322 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11323 return fold_build2_loc (loc, code, type,
11324 fold_convert_loc (loc, type,
11325 TREE_OPERAND (arg0, 0)),
11326 fold_convert_loc (loc, type,
11327 TREE_OPERAND (arg1, 0)));
11329 /* Convert ~X ^ C to X ^ ~C. */
11330 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11331 && TREE_CODE (arg1) == INTEGER_CST)
11332 return fold_build2_loc (loc, code, type,
11333 fold_convert_loc (loc, type,
11334 TREE_OPERAND (arg0, 0)),
11335 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11337 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11338 if (TREE_CODE (arg0) == BIT_AND_EXPR
11339 && integer_onep (TREE_OPERAND (arg0, 1))
11340 && integer_onep (arg1))
11341 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11342 build_zero_cst (TREE_TYPE (arg0)));
11344 /* Fold (X & Y) ^ Y as ~X & Y. */
11345 if (TREE_CODE (arg0) == BIT_AND_EXPR
11346 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11348 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11349 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11350 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11351 fold_convert_loc (loc, type, arg1));
11353 /* Fold (X & Y) ^ X as ~Y & X. */
11354 if (TREE_CODE (arg0) == BIT_AND_EXPR
11355 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11356 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11358 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11359 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11360 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11361 fold_convert_loc (loc, type, arg1));
11363 /* Fold X ^ (X & Y) as X & ~Y. */
11364 if (TREE_CODE (arg1) == BIT_AND_EXPR
11365 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11367 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11368 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11369 fold_convert_loc (loc, type, arg0),
11370 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11372 /* Fold X ^ (Y & X) as ~Y & X. */
11373 if (TREE_CODE (arg1) == BIT_AND_EXPR
11374 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11375 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11377 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11378 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11379 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11380 fold_convert_loc (loc, type, arg0));
11383 /* See if this can be simplified into a rotate first. If that
11384 is unsuccessful continue in the association code. */
11385 goto bit_rotate;
11387 case BIT_AND_EXPR:
11388 if (integer_all_onesp (arg1))
11389 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11390 if (integer_zerop (arg1))
11391 return omit_one_operand_loc (loc, type, arg1, arg0);
11392 if (operand_equal_p (arg0, arg1, 0))
11393 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11395 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11396 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11397 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11398 || (TREE_CODE (arg0) == EQ_EXPR
11399 && integer_zerop (TREE_OPERAND (arg0, 1))))
11400 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11401 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11403 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11404 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11405 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11406 || (TREE_CODE (arg1) == EQ_EXPR
11407 && integer_zerop (TREE_OPERAND (arg1, 1))))
11408 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11409 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11411 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11412 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11413 && TREE_CODE (arg1) == INTEGER_CST
11414 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11416 tree tmp1 = fold_convert_loc (loc, type, arg1);
11417 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11418 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11419 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11420 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11421 return
11422 fold_convert_loc (loc, type,
11423 fold_build2_loc (loc, BIT_IOR_EXPR,
11424 type, tmp2, tmp3));
11427 /* (X | Y) & Y is (X, Y). */
11428 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11429 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11430 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11431 /* (X | Y) & X is (Y, X). */
11432 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11433 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11434 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11435 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11436 /* X & (X | Y) is (Y, X). */
11437 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11438 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11439 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11440 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11441 /* X & (Y | X) is (Y, X). */
11442 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11443 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11444 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11445 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11447 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11448 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11449 && integer_onep (TREE_OPERAND (arg0, 1))
11450 && integer_onep (arg1))
11452 tree tem2;
11453 tem = TREE_OPERAND (arg0, 0);
11454 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11455 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11456 tem, tem2);
11457 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11458 build_zero_cst (TREE_TYPE (tem)));
11460 /* Fold ~X & 1 as (X & 1) == 0. */
11461 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11462 && integer_onep (arg1))
11464 tree tem2;
11465 tem = TREE_OPERAND (arg0, 0);
11466 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11467 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11468 tem, tem2);
11469 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11470 build_zero_cst (TREE_TYPE (tem)));
11472 /* Fold !X & 1 as X == 0. */
11473 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11474 && integer_onep (arg1))
11476 tem = TREE_OPERAND (arg0, 0);
11477 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11478 build_zero_cst (TREE_TYPE (tem)));
11481 /* Fold (X ^ Y) & Y as ~X & Y. */
11482 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11483 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11485 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11486 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11487 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11488 fold_convert_loc (loc, type, arg1));
11490 /* Fold (X ^ Y) & X as ~Y & X. */
11491 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11492 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11493 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11495 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11496 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11497 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11498 fold_convert_loc (loc, type, arg1));
11500 /* Fold X & (X ^ Y) as X & ~Y. */
11501 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11502 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11504 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11505 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11506 fold_convert_loc (loc, type, arg0),
11507 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11509 /* Fold X & (Y ^ X) as ~Y & X. */
11510 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11511 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11512 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11514 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11515 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11516 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11517 fold_convert_loc (loc, type, arg0));
11520 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11521 multiple of 1 << CST. */
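       /* E.g. Y = 8 and CST = 3: X * 8 always has its three low bits clear,
          so masking with -(1 << 3) == ~7 changes nothing and the AND can be
          dropped.  */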
11522 if (TREE_CODE (arg1) == INTEGER_CST)
11524 double_int cst1 = tree_to_double_int (arg1);
11525 double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11526 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11527 if ((cst1 & ncst1) == ncst1
11528 && multiple_of_p (type, arg0,
11529 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11530 return fold_convert_loc (loc, type, arg0);
11533 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11534 bits from CST2. */
11535 if (TREE_CODE (arg1) == INTEGER_CST
11536 && TREE_CODE (arg0) == MULT_EXPR
11537 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11539 int arg1tz
11540 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11541 if (arg1tz > 0)
11543 double_int arg1mask, masked;
11544 arg1mask = ~double_int::mask (arg1tz);
11545 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11546 TYPE_UNSIGNED (type));
11547 masked = arg1mask & tree_to_double_int (arg1);
11548 if (masked.is_zero ())
11549 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11550 arg0, arg1);
11551 else if (masked != tree_to_double_int (arg1))
11552 return fold_build2_loc (loc, code, type, op0,
11553 double_int_to_tree (type, masked));
11557 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11558 ((A & N) + B) & M -> (A + B) & M
11559 Similarly if (N & M) == 0,
11560 ((A | N) + B) & M -> (A + B) & M
11561 and for - instead of + (or unary - instead of +)
11562 and/or ^ instead of |.
11563 If B is constant and (B & M) == 0, fold into A & M. */
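       /* Concretely, with M == 0xff (so cst == 8) and N == 0x1ff:
          ((A & 0x1ff) + B) & 0xff == (A + B) & 0xff, because the bits the
          inner AND clears all lie above bit 8 and carries only propagate
          upward, so they can never change the low eight bits of the sum.  */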
11564 if (host_integerp (arg1, 1))
11566 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11567 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11568 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11569 && (TREE_CODE (arg0) == PLUS_EXPR
11570 || TREE_CODE (arg0) == MINUS_EXPR
11571 || TREE_CODE (arg0) == NEGATE_EXPR)
11572 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11573 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11575 tree pmop[2];
11576 int which = 0;
11577 unsigned HOST_WIDE_INT cst0;
11579 /* Now we know that arg0 is (C + D) or (C - D) or
11580 -C and arg1 (M) is == (1LL << cst) - 1.
11581 Store C into PMOP[0] and D into PMOP[1]. */
11582 pmop[0] = TREE_OPERAND (arg0, 0);
11583 pmop[1] = NULL;
11584 if (TREE_CODE (arg0) != NEGATE_EXPR)
11586 pmop[1] = TREE_OPERAND (arg0, 1);
11587 which = 1;
11590 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11591 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11592 & cst1) != cst1)
11593 which = -1;
11595 for (; which >= 0; which--)
11596 switch (TREE_CODE (pmop[which]))
11598 case BIT_AND_EXPR:
11599 case BIT_IOR_EXPR:
11600 case BIT_XOR_EXPR:
11601 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11602 != INTEGER_CST)
11603 break;
11604 /* tree_low_cst is not used, because we don't care about
11605 the upper bits. */
11606 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11607 cst0 &= cst1;
11608 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11610 if (cst0 != cst1)
11611 break;
11613 else if (cst0 != 0)
11614 break;
11615 /* If C or D is of the form (A & N) where
11616 (N & M) == M, or of the form (A | N) or
11617 (A ^ N) where (N & M) == 0, replace it with A. */
11618 pmop[which] = TREE_OPERAND (pmop[which], 0);
11619 break;
11620 case INTEGER_CST:
11621 /* If C or D is a N where (N & M) == 0, it can be
11622 omitted (assumed 0). */
11623 if ((TREE_CODE (arg0) == PLUS_EXPR
11624 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11625 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11626 pmop[which] = NULL;
11627 break;
11628 default:
11629 break;
11632 /* Only build anything new if we optimized one or both arguments
11633 above. */
11634 if (pmop[0] != TREE_OPERAND (arg0, 0)
11635 || (TREE_CODE (arg0) != NEGATE_EXPR
11636 && pmop[1] != TREE_OPERAND (arg0, 1)))
11638 tree utype = TREE_TYPE (arg0);
11639 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11641 /* Perform the operations in a type that has defined
11642 overflow behavior. */
11643 utype = unsigned_type_for (TREE_TYPE (arg0));
11644 if (pmop[0] != NULL)
11645 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11646 if (pmop[1] != NULL)
11647 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11650 if (TREE_CODE (arg0) == NEGATE_EXPR)
11651 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11652 else if (TREE_CODE (arg0) == PLUS_EXPR)
11654 if (pmop[0] != NULL && pmop[1] != NULL)
11655 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11656 pmop[0], pmop[1]);
11657 else if (pmop[0] != NULL)
11658 tem = pmop[0];
11659 else if (pmop[1] != NULL)
11660 tem = pmop[1];
11661 else
11662 return build_int_cst (type, 0);
11664 else if (pmop[0] == NULL)
11665 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11666 else
11667 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11668 pmop[0], pmop[1]);
11669 /* TEM is now the new binary +, - or unary - replacement. */
11670 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11671 fold_convert_loc (loc, utype, arg1));
11672 return fold_convert_loc (loc, type, tem);
11677 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11678 if (t1 != NULL_TREE)
11679 return t1;
11680 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11681 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11682 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11684 unsigned int prec
11685 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11687 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11688 && (~TREE_INT_CST_LOW (arg1)
11689 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11690 return
11691 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11694 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11696 This results in more efficient code for machines without a NOR
11697 instruction. Combine will canonicalize to the first form
11698 which will allow use of NOR instructions provided by the
11699 backend if they exist. */
11700 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11701 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11703 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11704 build2 (BIT_IOR_EXPR, type,
11705 fold_convert_loc (loc, type,
11706 TREE_OPERAND (arg0, 0)),
11707 fold_convert_loc (loc, type,
11708 TREE_OPERAND (arg1, 0))));
11711 /* If arg0 is derived from the address of an object or function, we may
11712 be able to fold this expression using the object or function's
11713 alignment. */
11714 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11716 unsigned HOST_WIDE_INT modulus, residue;
11717 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11719 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11720 integer_onep (arg1));
11722 /* This works because modulus is a power of 2. If this weren't the
11723 case, we'd have to replace it by its greatest power-of-2
11724 divisor: modulus & -modulus. */
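       /* E.g. if get_pointer_modulus_and_residue reports that ARG0 is
          8-byte aligned (modulus 8, residue 0) and ARG1 is 7, the whole
          expression folds to 0 without needing the pointer value at all.  */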
11725 if (low < modulus)
11726 return build_int_cst (type, residue & low);
11729 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11730 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11731 if the new mask might be further optimized. */
11732 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11733 || TREE_CODE (arg0) == RSHIFT_EXPR)
11734 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11735 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11736 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11737 < TYPE_PRECISION (TREE_TYPE (arg0))
11738 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11739 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11741 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11742 unsigned HOST_WIDE_INT mask
11743 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11744 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11745 tree shift_type = TREE_TYPE (arg0);
11747 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11748 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11749 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11750 && TYPE_PRECISION (TREE_TYPE (arg0))
11751 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11753 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11754 tree arg00 = TREE_OPERAND (arg0, 0);
11755 /* See if more bits can be proven as zero because of
11756 zero extension. */
11757 if (TREE_CODE (arg00) == NOP_EXPR
11758 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11760 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11761 if (TYPE_PRECISION (inner_type)
11762 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11763 && TYPE_PRECISION (inner_type) < prec)
11765 prec = TYPE_PRECISION (inner_type);
11766 /* See if we can shorten the right shift. */
11767 if (shiftc < prec)
11768 shift_type = inner_type;
11771 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11772 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11773 zerobits <<= prec - shiftc;
11774 /* For an arithmetic shift, if the sign bit could be set, zerobits
11775 can actually contain sign bits, so no transformation is
11776 possible, unless MASK masks them all away. In that
11777 case the shift needs to be converted into logical shift. */
11778 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11779 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11781 if ((mask & zerobits) == 0)
11782 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11783 else
11784 zerobits = 0;
11788 /* ((X << 16) & 0xff00) is (X, 0). */
11789 if ((mask & zerobits) == mask)
11790 return omit_one_operand_loc (loc, type,
11791 build_int_cst (type, 0), arg0);
11793 newmask = mask | zerobits;
11794 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11796 unsigned int prec;
11798 /* Only do the transformation if NEWMASK is some integer
11799 mode's mask. */
11800 for (prec = BITS_PER_UNIT;
11801 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11802 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11803 break;
11804 if (prec < HOST_BITS_PER_WIDE_INT
11805 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11807 tree newmaskt;
11809 if (shift_type != TREE_TYPE (arg0))
11811 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11812 fold_convert_loc (loc, shift_type,
11813 TREE_OPERAND (arg0, 0)),
11814 TREE_OPERAND (arg0, 1));
11815 tem = fold_convert_loc (loc, type, tem);
11817 else
11818 tem = op0;
11819 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11820 if (!tree_int_cst_equal (newmaskt, arg1))
11821 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11826 goto associate;
11828 case RDIV_EXPR:
11829 /* Don't touch a floating-point divide by zero unless the mode
11830 of the constant can represent infinity. */
11831 if (TREE_CODE (arg1) == REAL_CST
11832 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11833 && real_zerop (arg1))
11834 return NULL_TREE;
11836 /* Optimize A / A to 1.0 if we don't care about
11837 NaNs or Infinities. Skip the transformation
11838 for non-real operands. */
11839 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11840 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11841 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11842 && operand_equal_p (arg0, arg1, 0))
11844 tree r = build_real (TREE_TYPE (arg0), dconst1);
11846 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11849 /* The complex version of the above A / A optimization. */
11850 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11851 && operand_equal_p (arg0, arg1, 0))
11853 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11854 if (! HONOR_NANS (TYPE_MODE (elem_type))
11855 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11857 tree r = build_real (elem_type, dconst1);
11858 /* omit_two_operands will call fold_convert for us. */
11859 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11863 /* (-A) / (-B) -> A / B */
11864 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11865 return fold_build2_loc (loc, RDIV_EXPR, type,
11866 TREE_OPERAND (arg0, 0),
11867 negate_expr (arg1));
11868 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11869 return fold_build2_loc (loc, RDIV_EXPR, type,
11870 negate_expr (arg0),
11871 TREE_OPERAND (arg1, 0));
11873 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11874 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11875 && real_onep (arg1))
11876 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11878 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11879 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11880 && real_minus_onep (arg1))
11881 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11882 negate_expr (arg0)));
11884 /* If ARG1 is a constant, we can convert this to a multiply by the
11885 reciprocal. This does not have the same rounding properties,
11886 so only do this if -freciprocal-math. We can actually
11887 always safely do it if ARG1 is a power of two, but it's hard to
11888 tell if it is or not in a portable manner. */
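       /* E.g. X / 4.0 can always become X * 0.25 exactly, but X / 3.0 versus
          X * (1.0 / 3.0) can differ slightly because the reciprocal itself
          is rounded; exact_inverse below catches the power-of-two cases even
          without -freciprocal-math.  */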
11889 if (optimize
11890 && (TREE_CODE (arg1) == REAL_CST
11891 || (TREE_CODE (arg1) == COMPLEX_CST
11892 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11893 || (TREE_CODE (arg1) == VECTOR_CST
11894 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11896 if (flag_reciprocal_math
11897 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11898 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11899 /* Find the reciprocal if optimizing and the result is exact.
11900 TODO: Complex reciprocal not implemented. */
11901 if (TREE_CODE (arg1) != COMPLEX_CST)
11903 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11905 if (inverse)
11906 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11909 /* Convert A/B/C to A/(B*C). */
11910 if (flag_reciprocal_math
11911 && TREE_CODE (arg0) == RDIV_EXPR)
11912 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11913 fold_build2_loc (loc, MULT_EXPR, type,
11914 TREE_OPERAND (arg0, 1), arg1));
11916 /* Convert A/(B/C) to (A/B)*C. */
11917 if (flag_reciprocal_math
11918 && TREE_CODE (arg1) == RDIV_EXPR)
11919 return fold_build2_loc (loc, MULT_EXPR, type,
11920 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11921 TREE_OPERAND (arg1, 0)),
11922 TREE_OPERAND (arg1, 1));
11924 /* Convert C1/(X*C2) into (C1/C2)/X. */
11925 if (flag_reciprocal_math
11926 && TREE_CODE (arg1) == MULT_EXPR
11927 && TREE_CODE (arg0) == REAL_CST
11928 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11930 tree tem = const_binop (RDIV_EXPR, arg0,
11931 TREE_OPERAND (arg1, 1));
11932 if (tem)
11933 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11934 TREE_OPERAND (arg1, 0));
11937 if (flag_unsafe_math_optimizations)
11939 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11940 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11942 /* Optimize sin(x)/cos(x) as tan(x). */
11943 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11944 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11945 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11946 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11947 CALL_EXPR_ARG (arg1, 0), 0))
11949 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11951 if (tanfn != NULL_TREE)
11952 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11955 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11956 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11957 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11958 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11959 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11960 CALL_EXPR_ARG (arg1, 0), 0))
11962 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11964 if (tanfn != NULL_TREE)
11966 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11967 CALL_EXPR_ARG (arg0, 0));
11968 return fold_build2_loc (loc, RDIV_EXPR, type,
11969 build_real (type, dconst1), tmp);
11973 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11974 NaNs or Infinities. */
11975 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11976 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11977 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11979 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11980 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11982 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11983 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11984 && operand_equal_p (arg00, arg01, 0))
11986 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11988 if (cosfn != NULL_TREE)
11989 return build_call_expr_loc (loc, cosfn, 1, arg00);
11993 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11994 NaNs or Infinities. */
11995 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11996 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11997 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11999 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12000 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12002 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12003 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12004 && operand_equal_p (arg00, arg01, 0))
12006 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12008 if (cosfn != NULL_TREE)
12010 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12011 return fold_build2_loc (loc, RDIV_EXPR, type,
12012 build_real (type, dconst1),
12013 tmp);
12018 /* Optimize pow(x,c)/x as pow(x,c-1). */
12019 if (fcode0 == BUILT_IN_POW
12020 || fcode0 == BUILT_IN_POWF
12021 || fcode0 == BUILT_IN_POWL)
12023 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12024 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12025 if (TREE_CODE (arg01) == REAL_CST
12026 && !TREE_OVERFLOW (arg01)
12027 && operand_equal_p (arg1, arg00, 0))
12029 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12030 REAL_VALUE_TYPE c;
12031 tree arg;
12033 c = TREE_REAL_CST (arg01);
12034 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12035 arg = build_real (type, c);
12036 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12040 /* Optimize a/root(b/c) into a*root(c/b). */
12041 if (BUILTIN_ROOT_P (fcode1))
12043 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12045 if (TREE_CODE (rootarg) == RDIV_EXPR)
12047 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12048 tree b = TREE_OPERAND (rootarg, 0);
12049 tree c = TREE_OPERAND (rootarg, 1);
12051 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12053 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12054 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12058 /* Optimize x/expN(y) into x*expN(-y). */
12059 if (BUILTIN_EXPONENT_P (fcode1))
12061 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12062 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12063 arg1 = build_call_expr_loc (loc,
12064 expfn, 1,
12065 fold_convert_loc (loc, type, arg));
12066 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12069 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12070 if (fcode1 == BUILT_IN_POW
12071 || fcode1 == BUILT_IN_POWF
12072 || fcode1 == BUILT_IN_POWL)
12074 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12075 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12076 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12077 tree neg11 = fold_convert_loc (loc, type,
12078 negate_expr (arg11));
12079 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12080 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12083 return NULL_TREE;
12085 case TRUNC_DIV_EXPR:
12086 /* Optimize (X & (-A)) / A where A is a power of 2,
12087 to X >> log2(A) */
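       /* E.g. A = 8: X & -8 clears the three low bits, so the value is an
          exact multiple of 8, and for exact multiples a truncating signed
          division by 8 equals an arithmetic right shift by 3.  This also
          works for negative X, e.g. X = -5: (-8) / 8 == -1 == -8 >> 3.  */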
12088 if (TREE_CODE (arg0) == BIT_AND_EXPR
12089 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12090 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12092 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12093 arg1, TREE_OPERAND (arg0, 1));
12094 if (sum && integer_zerop (sum)) {
12095 unsigned long pow2;
12097 if (TREE_INT_CST_LOW (arg1))
12098 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12099 else
12100 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12101 + HOST_BITS_PER_WIDE_INT;
12103 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12104 TREE_OPERAND (arg0, 0),
12105 build_int_cst (integer_type_node, pow2));
12109 /* Fall through */
12111 case FLOOR_DIV_EXPR:
12112 /* Simplify A / (B << N) where A and B are positive and B is
12113 a power of 2, to A >> (N + log2(B)). */
12114 strict_overflow_p = false;
12115 if (TREE_CODE (arg1) == LSHIFT_EXPR
12116 && (TYPE_UNSIGNED (type)
12117 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12119 tree sval = TREE_OPERAND (arg1, 0);
12120 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12122 tree sh_cnt = TREE_OPERAND (arg1, 1);
12123 unsigned long pow2;
12125 if (TREE_INT_CST_LOW (sval))
12126 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12127 else
12128 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12129 + HOST_BITS_PER_WIDE_INT;
12131 if (strict_overflow_p)
12132 fold_overflow_warning (("assuming signed overflow does not "
12133 "occur when simplifying A / (B << N)"),
12134 WARN_STRICT_OVERFLOW_MISC);
12136 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12137 sh_cnt,
12138 build_int_cst (TREE_TYPE (sh_cnt),
12139 pow2));
12140 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12141 fold_convert_loc (loc, type, arg0), sh_cnt);
12145 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12146 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12147 if (INTEGRAL_TYPE_P (type)
12148 && TYPE_UNSIGNED (type)
12149 && code == FLOOR_DIV_EXPR)
12150 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12152 /* Fall through */
12154 case ROUND_DIV_EXPR:
12155 case CEIL_DIV_EXPR:
12156 case EXACT_DIV_EXPR:
12157 if (integer_onep (arg1))
12158 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12159 if (integer_zerop (arg1))
12160 return NULL_TREE;
12161 /* X / -1 is -X. */
12162 if (!TYPE_UNSIGNED (type)
12163 && TREE_CODE (arg1) == INTEGER_CST
12164 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12165 && TREE_INT_CST_HIGH (arg1) == -1)
12166 return fold_convert_loc (loc, type, negate_expr (arg0));
12168 /* Convert -A / -B to A / B when the type is signed and overflow is
12169 undefined. */
12170 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12171 && TREE_CODE (arg0) == NEGATE_EXPR
12172 && negate_expr_p (arg1))
12174 if (INTEGRAL_TYPE_P (type))
12175 fold_overflow_warning (("assuming signed overflow does not occur "
12176 "when distributing negation across "
12177 "division"),
12178 WARN_STRICT_OVERFLOW_MISC);
12179 return fold_build2_loc (loc, code, type,
12180 fold_convert_loc (loc, type,
12181 TREE_OPERAND (arg0, 0)),
12182 fold_convert_loc (loc, type,
12183 negate_expr (arg1)));
12185 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12186 && TREE_CODE (arg1) == NEGATE_EXPR
12187 && negate_expr_p (arg0))
12189 if (INTEGRAL_TYPE_P (type))
12190 fold_overflow_warning (("assuming signed overflow does not occur "
12191 "when distributing negation across "
12192 "division"),
12193 WARN_STRICT_OVERFLOW_MISC);
12194 return fold_build2_loc (loc, code, type,
12195 fold_convert_loc (loc, type,
12196 negate_expr (arg0)),
12197 fold_convert_loc (loc, type,
12198 TREE_OPERAND (arg1, 0)));
12201 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12202 operation, EXACT_DIV_EXPR.
12204 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12205 At one time others generated faster code; it's not clear if they do
12206 after the last round of changes to the DIV code in expmed.c. */
12207 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12208 && multiple_of_p (type, arg0, arg1))
12209 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12211 strict_overflow_p = false;
12212 if (TREE_CODE (arg1) == INTEGER_CST
12213 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12214 &strict_overflow_p)))
12216 if (strict_overflow_p)
12217 fold_overflow_warning (("assuming signed overflow does not occur "
12218 "when simplifying division"),
12219 WARN_STRICT_OVERFLOW_MISC);
12220 return fold_convert_loc (loc, type, tem);
12223 return NULL_TREE;
12225 case CEIL_MOD_EXPR:
12226 case FLOOR_MOD_EXPR:
12227 case ROUND_MOD_EXPR:
12228 case TRUNC_MOD_EXPR:
12229 /* X % 1 is always zero, but be sure to preserve any side
12230 effects in X. */
12231 if (integer_onep (arg1))
12232 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12234 /* X % 0, return X % 0 unchanged so that we can get the
12235 proper warnings and errors. */
12236 if (integer_zerop (arg1))
12237 return NULL_TREE;
12239 /* 0 % X is always zero, but be sure to preserve any side
12240 effects in X. Place this after checking for X == 0. */
12241 if (integer_zerop (arg0))
12242 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12244 /* X % -1 is zero. */
12245 if (!TYPE_UNSIGNED (type)
12246 && TREE_CODE (arg1) == INTEGER_CST
12247 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12248 && TREE_INT_CST_HIGH (arg1) == -1)
12249 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12251 /* X % -C is the same as X % C. */
12252 if (code == TRUNC_MOD_EXPR
12253 && !TYPE_UNSIGNED (type)
12254 && TREE_CODE (arg1) == INTEGER_CST
12255 && !TREE_OVERFLOW (arg1)
12256 && TREE_INT_CST_HIGH (arg1) < 0
12257 && !TYPE_OVERFLOW_TRAPS (type)
12258 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12259 && !sign_bit_p (arg1, arg1))
12260 return fold_build2_loc (loc, code, type,
12261 fold_convert_loc (loc, type, arg0),
12262 fold_convert_loc (loc, type,
12263 negate_expr (arg1)));
12265 /* X % -Y is the same as X % Y. */
12266 if (code == TRUNC_MOD_EXPR
12267 && !TYPE_UNSIGNED (type)
12268 && TREE_CODE (arg1) == NEGATE_EXPR
12269 && !TYPE_OVERFLOW_TRAPS (type))
12270 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12271 fold_convert_loc (loc, type,
12272 TREE_OPERAND (arg1, 0)));
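 /* For instance, "X % -16" folds to "X % 16" and "X % -Y" to "X % Y":
 with truncating division the remainder takes the sign of the
 dividend, so negating the divisor cannot change the result. */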
12274 strict_overflow_p = false;
12275 if (TREE_CODE (arg1) == INTEGER_CST
12276 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12277 &strict_overflow_p)))
12279 if (strict_overflow_p)
12280 fold_overflow_warning (("assuming signed overflow does not occur "
12281 "when simplifying modulus"),
12282 WARN_STRICT_OVERFLOW_MISC);
12283 return fold_convert_loc (loc, type, tem);
12286 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12287 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12288 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12289 && (TYPE_UNSIGNED (type)
12290 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12292 tree c = arg1;
12293 /* Also optimize A % (C << N) where C is a power of 2,
12294 to A & ((C << N) - 1). */
12295 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12296 c = TREE_OPERAND (arg1, 0);
12298 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12300 tree mask
12301 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12302 build_int_cst (TREE_TYPE (arg1), 1));
12303 if (strict_overflow_p)
12304 fold_overflow_warning (("assuming signed overflow does not "
12305 "occur when simplifying "
12306 "X % (power of two)"),
12307 WARN_STRICT_OVERFLOW_MISC);
12308 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12309 fold_convert_loc (loc, type, arg0),
12310 fold_convert_loc (loc, type, mask));
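 /* For instance, with unsigned X, "X % 16" folds to "X & 15", and
 "X % (4 << N)" folds to "X & ((4 << N) - 1)". */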
12314 return NULL_TREE;
12316 case LROTATE_EXPR:
12317 case RROTATE_EXPR:
12318 if (integer_all_onesp (arg0))
12319 return omit_one_operand_loc (loc, type, arg0, arg1);
12320 goto shift;
12322 case RSHIFT_EXPR:
12323 /* Optimize -1 >> x for arithmetic right shifts. */
12324 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12325 && tree_expr_nonnegative_p (arg1))
12326 return omit_one_operand_loc (loc, type, arg0, arg1);
12327 /* ... fall through ... */
12329 case LSHIFT_EXPR:
12330 shift:
12331 if (integer_zerop (arg1))
12332 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12333 if (integer_zerop (arg0))
12334 return omit_one_operand_loc (loc, type, arg0, arg1);
12336 /* Since a negative shift count is not well-defined,
12337 don't try to compute it in the compiler. */
12338 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12339 return NULL_TREE;
12341 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12342 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12343 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12344 && host_integerp (TREE_OPERAND (arg0, 1), false)
12345 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12347 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12348 + TREE_INT_CST_LOW (arg1));
12350 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12351 being well defined. */
12352 if (low >= TYPE_PRECISION (type))
12354 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12355 low = low % TYPE_PRECISION (type);
12356 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12357 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12358 TREE_OPERAND (arg0, 0));
12359 else
12360 low = TYPE_PRECISION (type) - 1;
12363 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12364 build_int_cst (type, low));
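 /* For instance, at 32-bit precision "(X << 3) << 5" folds to
 "X << 8", rotate counts are reduced modulo 32, and an over-wide
 left or unsigned shift folds to the constant zero. */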
12367 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12368 into x & ((unsigned)-1 >> c) for unsigned types. */
12369 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12370 || (TYPE_UNSIGNED (type)
12371 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12372 && host_integerp (arg1, false)
12373 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12374 && host_integerp (TREE_OPERAND (arg0, 1), false)
12375 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12377 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12378 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12379 tree lshift;
12380 tree arg00;
12382 if (low0 == low1)
12384 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12386 lshift = build_int_cst (type, -1);
12387 lshift = int_const_binop (code, lshift, arg1);
12389 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
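 /* For instance, at 32-bit precision "(X >> 4) << 4" folds to
 "X & (-1 << 4)", i.e. X with its low four bits cleared. */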
12393 /* Rewrite an LROTATE_EXPR by a constant into an
12394 RROTATE_EXPR by a new constant. */
12395 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12397 tree tem = build_int_cst (TREE_TYPE (arg1),
12398 TYPE_PRECISION (type));
12399 tem = const_binop (MINUS_EXPR, tem, arg1);
12400 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
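 /* For instance, at 32-bit precision "X lrotate 5" is rewritten as
 "X rrotate 27", so only one rotate direction need be handled
 downstream. */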
12403 /* If we have a rotate of a bit operation with the rotate count and
12404 the second operand of the bit operation both constant,
12405 permute the two operations. */
12406 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12407 && (TREE_CODE (arg0) == BIT_AND_EXPR
12408 || TREE_CODE (arg0) == BIT_IOR_EXPR
12409 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12410 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12411 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12412 fold_build2_loc (loc, code, type,
12413 TREE_OPERAND (arg0, 0), arg1),
12414 fold_build2_loc (loc, code, type,
12415 TREE_OPERAND (arg0, 1), arg1));
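 /* For instance, on an 8-bit type "(X & 0xF0) rrotate 4" becomes
 "(X rrotate 4) & 0x0F", rotating the constant mask along with the
 value. */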
12417 /* Two consecutive rotates adding up to the precision of the
12418 type can be ignored. */
12419 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12420 && TREE_CODE (arg0) == RROTATE_EXPR
12421 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12422 && TREE_INT_CST_HIGH (arg1) == 0
12423 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12424 && ((TREE_INT_CST_LOW (arg1)
12425 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12426 == (unsigned int) TYPE_PRECISION (type)))
12427 return TREE_OPERAND (arg0, 0);
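 /* For instance, at 32-bit precision "(X rrotate 12) rrotate 20"
 rotates by 32 bits in total, the identity, and so folds back to
 plain X. */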
12429 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12430 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12431 if the latter can be further optimized. */
12432 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12433 && TREE_CODE (arg0) == BIT_AND_EXPR
12434 && TREE_CODE (arg1) == INTEGER_CST
12435 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12437 tree mask = fold_build2_loc (loc, code, type,
12438 fold_convert_loc (loc, type,
12439 TREE_OPERAND (arg0, 1)),
12440 arg1);
12441 tree shift = fold_build2_loc (loc, code, type,
12442 fold_convert_loc (loc, type,
12443 TREE_OPERAND (arg0, 0)),
12444 arg1);
12445 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12446 if (tem)
12447 return tem;
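 /* For instance, "(X & 0xFF00) >> 8" is tried as "(X >> 8) & 0xFF",
 and the rewrite is kept only when the resulting BIT_AND_EXPR
 itself folds. */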
12450 return NULL_TREE;
12452 case MIN_EXPR:
12453 if (operand_equal_p (arg0, arg1, 0))
12454 return omit_one_operand_loc (loc, type, arg0, arg1);
12455 if (INTEGRAL_TYPE_P (type)
12456 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12457 return omit_one_operand_loc (loc, type, arg1, arg0);
12458 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12459 if (tem)
12460 return tem;
12461 goto associate;
12463 case MAX_EXPR:
12464 if (operand_equal_p (arg0, arg1, 0))
12465 return omit_one_operand_loc (loc, type, arg0, arg1);
12466 if (INTEGRAL_TYPE_P (type)
12467 && TYPE_MAX_VALUE (type)
12468 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12469 return omit_one_operand_loc (loc, type, arg1, arg0);
12470 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12471 if (tem)
12472 return tem;
12473 goto associate;
12475 case TRUTH_ANDIF_EXPR:
12476 /* Note that the operands of this must be ints
12477 and their values must be 0 or 1.
12478 ("true" is a fixed value perhaps depending on the language.) */
12479 /* If first arg is constant zero, return it. */
12480 if (integer_zerop (arg0))
12481 return fold_convert_loc (loc, type, arg0);
12482 case TRUTH_AND_EXPR:
12483 /* If either arg is constant true, drop it. */
12484 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12485 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12486 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12487 /* Preserve sequence points. */
12488 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12489 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12490 /* If second arg is constant zero, result is zero, but first arg
12491 must be evaluated. */
12492 if (integer_zerop (arg1))
12493 return omit_one_operand_loc (loc, type, arg1, arg0);
12494 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12495 case will be handled here. */
12496 if (integer_zerop (arg0))
12497 return omit_one_operand_loc (loc, type, arg0, arg1);
12499 /* !X && X is always false. */
12500 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12501 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12502 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12503 /* X && !X is always false. */
12504 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12505 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12506 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12508 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12509 means A >= Y && A != MAX, but in this case we know that
12510 A < X <= MAX. */
12512 if (!TREE_SIDE_EFFECTS (arg0)
12513 && !TREE_SIDE_EFFECTS (arg1))
12515 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12516 if (tem && !operand_equal_p (tem, arg0, 0))
12517 return fold_build2_loc (loc, code, type, tem, arg1);
12519 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12520 if (tem && !operand_equal_p (tem, arg1, 0))
12521 return fold_build2_loc (loc, code, type, arg0, tem);
12524 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12525 != NULL_TREE)
12526 return tem;
12528 return NULL_TREE;
12530 case TRUTH_ORIF_EXPR:
12531 /* Note that the operands of this must be ints
12532 and their values must be 0 or true.
12533 ("true" is a fixed value perhaps depending on the language.) */
12534 /* If first arg is constant true, return it. */
12535 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12536 return fold_convert_loc (loc, type, arg0);
12537 case TRUTH_OR_EXPR:
12538 /* If either arg is constant zero, drop it. */
12539 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12540 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12541 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12542 /* Preserve sequence points. */
12543 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12544 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12545 /* If second arg is constant true, result is true, but we must
12546 evaluate first arg. */
12547 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12548 return omit_one_operand_loc (loc, type, arg1, arg0);
12549 /* Likewise for first arg, but note this only occurs here for
12550 TRUTH_OR_EXPR. */
12551 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12552 return omit_one_operand_loc (loc, type, arg0, arg1);
12554 /* !X || X is always true. */
12555 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12556 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12557 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12558 /* X || !X is always true. */
12559 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12560 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12561 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12563 /* (X && !Y) || (!X && Y) is X ^ Y */
12564 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12565 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12567 tree a0, a1, l0, l1, n0, n1;
12569 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12570 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12572 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12573 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12575 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12576 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12578 if ((operand_equal_p (n0, a0, 0)
12579 && operand_equal_p (n1, a1, 0))
12580 || (operand_equal_p (n0, a1, 0)
12581 && operand_equal_p (n1, a0, 0)))
12582 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12585 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12586 != NULL_TREE)
12587 return tem;
12589 return NULL_TREE;
12591 case TRUTH_XOR_EXPR:
12592 /* If the second arg is constant zero, drop it. */
12593 if (integer_zerop (arg1))
12594 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12595 /* If the second arg is constant true, this is a logical inversion. */
12596 if (integer_onep (arg1))
12598 /* Only call invert_truthvalue if operand is a truth value. */
12599 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12600 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12601 else
12602 tem = invert_truthvalue_loc (loc, arg0);
12603 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12605 /* Identical arguments cancel to zero. */
12606 if (operand_equal_p (arg0, arg1, 0))
12607 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12609 /* !X ^ X is always true. */
12610 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12611 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12612 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12614 /* X ^ !X is always true. */
12615 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12616 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12617 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12619 return NULL_TREE;
12621 case EQ_EXPR:
12622 case NE_EXPR:
12623 STRIP_NOPS (arg0);
12624 STRIP_NOPS (arg1);
12626 tem = fold_comparison (loc, code, type, op0, op1);
12627 if (tem != NULL_TREE)
12628 return tem;
12630 /* bool_var != 0 becomes bool_var. */
12631 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12632 && code == NE_EXPR)
12633 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12635 /* bool_var == 1 becomes bool_var. */
12636 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12637 && code == EQ_EXPR)
12638 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12640 /* bool_var != 1 becomes !bool_var. */
12641 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12642 && code == NE_EXPR)
12643 return fold_convert_loc (loc, type,
12644 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12645 TREE_TYPE (arg0), arg0));
12647 /* bool_var == 0 becomes !bool_var. */
12648 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12649 && code == EQ_EXPR)
12650 return fold_convert_loc (loc, type,
12651 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12652 TREE_TYPE (arg0), arg0));
12654 /* !exp != 0 becomes !exp */
12655 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12656 && code == NE_EXPR)
12657 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12659 /* If this is an equality comparison of the address of two non-weak,
12660 unaliased symbols neither of which are extern (since we do not
12661 have access to attributes for externs), then we know the result. */
12662 if (TREE_CODE (arg0) == ADDR_EXPR
12663 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12664 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12665 && ! lookup_attribute ("alias",
12666 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12667 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12668 && TREE_CODE (arg1) == ADDR_EXPR
12669 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12670 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12671 && ! lookup_attribute ("alias",
12672 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12673 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12675 /* We know that we're looking at the address of two
12676 non-weak, unaliased, static _DECL nodes.
12678 It is both wasteful and incorrect to call operand_equal_p
12679 to compare the two ADDR_EXPR nodes. It is wasteful in that
12680 all we need to do is test pointer equality for the arguments
12681 to the two ADDR_EXPR nodes. It is incorrect to use
12682 operand_equal_p as that function is NOT equivalent to a
12683 C equality test. It can in fact return false for two
12684 objects which would test as equal using the C equality
12685 operator. */
12686 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12687 return constant_boolean_node (equal
12688 ? code == EQ_EXPR : code != EQ_EXPR,
12689 type);
12692 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12693 a MINUS_EXPR of a constant, we can convert it into a comparison with
12694 a revised constant as long as no overflow occurs. */
12695 if (TREE_CODE (arg1) == INTEGER_CST
12696 && (TREE_CODE (arg0) == PLUS_EXPR
12697 || TREE_CODE (arg0) == MINUS_EXPR)
12698 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12699 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12700 ? MINUS_EXPR : PLUS_EXPR,
12701 fold_convert_loc (loc, TREE_TYPE (arg0),
12702 arg1),
12703 TREE_OPERAND (arg0, 1)))
12704 && !TREE_OVERFLOW (tem))
12705 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12707 /* Similarly for a NEGATE_EXPR. */
12708 if (TREE_CODE (arg0) == NEGATE_EXPR
12709 && TREE_CODE (arg1) == INTEGER_CST
12710 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12711 arg1)))
12712 && TREE_CODE (tem) == INTEGER_CST
12713 && !TREE_OVERFLOW (tem))
12714 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12716 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12717 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12718 && TREE_CODE (arg1) == INTEGER_CST
12719 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12720 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12721 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12722 fold_convert_loc (loc,
12723 TREE_TYPE (arg0),
12724 arg1),
12725 TREE_OPERAND (arg0, 1)));
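 /* For instance, "(X ^ 3) == 5" folds to "X == 6", since XOR by a
 constant is its own inverse. */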
12727 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12728 if ((TREE_CODE (arg0) == PLUS_EXPR
12729 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12730 || TREE_CODE (arg0) == MINUS_EXPR)
12731 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12732 0)),
12733 arg1, 0)
12734 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12735 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12737 tree val = TREE_OPERAND (arg0, 1);
12738 return omit_two_operands_loc (loc, type,
12739 fold_build2_loc (loc, code, type,
12740 val,
12741 build_int_cst (TREE_TYPE (val),
12742 0)),
12743 TREE_OPERAND (arg0, 0), arg1);
12746 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12747 if (TREE_CODE (arg0) == MINUS_EXPR
12748 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12749 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12750 1)),
12751 arg1, 0)
12752 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12754 return omit_two_operands_loc (loc, type,
12755 code == NE_EXPR
12756 ? boolean_true_node : boolean_false_node,
12757 TREE_OPERAND (arg0, 1), arg1);
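 /* For instance, "5 - X == X" would need 2*X == 5; an even value
 cannot equal an odd one even modulo 2^N, so the EQ form folds to
 false and the NE form to true. */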
12760 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12761 for !=. Don't do this for ordered comparisons due to overflow. */
12762 if (TREE_CODE (arg0) == MINUS_EXPR
12763 && integer_zerop (arg1))
12764 return fold_build2_loc (loc, code, type,
12765 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12767 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12768 if (TREE_CODE (arg0) == ABS_EXPR
12769 && (integer_zerop (arg1) || real_zerop (arg1)))
12770 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12772 /* If this is an EQ or NE comparison with zero and ARG0 is
12773 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12774 two operations, but the latter can be done in one less insn
12775 on machines that have only two-operand insns or on which a
12776 constant cannot be the first operand. */
12777 if (TREE_CODE (arg0) == BIT_AND_EXPR
12778 && integer_zerop (arg1))
12780 tree arg00 = TREE_OPERAND (arg0, 0);
12781 tree arg01 = TREE_OPERAND (arg0, 1);
12782 if (TREE_CODE (arg00) == LSHIFT_EXPR
12783 && integer_onep (TREE_OPERAND (arg00, 0)))
12785 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12786 arg01, TREE_OPERAND (arg00, 1));
12787 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12788 build_int_cst (TREE_TYPE (arg0), 1));
12789 return fold_build2_loc (loc, code, type,
12790 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12791 arg1);
12793 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12794 && integer_onep (TREE_OPERAND (arg01, 0)))
12796 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12797 arg00, TREE_OPERAND (arg01, 1));
12798 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12799 build_int_cst (TREE_TYPE (arg0), 1));
12800 return fold_build2_loc (loc, code, type,
12801 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12802 arg1);
12806 /* If this is an NE or EQ comparison of zero against the result of a
12807 signed MOD operation whose second operand is a power of 2, make
12808 the MOD operation unsigned since it is simpler and equivalent. */
12809 if (integer_zerop (arg1)
12810 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12811 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12812 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12813 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12814 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12815 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12817 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12818 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12819 fold_convert_loc (loc, newtype,
12820 TREE_OPERAND (arg0, 0)),
12821 fold_convert_loc (loc, newtype,
12822 TREE_OPERAND (arg0, 1)));
12824 return fold_build2_loc (loc, code, type, newmod,
12825 fold_convert_loc (loc, newtype, arg1));
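 /* For instance, with signed X, "X % 4 == 0" becomes
 "(unsigned) X % 4U == 0": the power-of-two modulus divides
 2^precision, so zeroness of the remainder survives the cast. */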
12828 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12829 C1 is a valid shift constant, and C2 is a power of two, i.e.
12830 a single bit. */
12831 if (TREE_CODE (arg0) == BIT_AND_EXPR
12832 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12833 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12834 == INTEGER_CST
12835 && integer_pow2p (TREE_OPERAND (arg0, 1))
12836 && integer_zerop (arg1))
12838 tree itype = TREE_TYPE (arg0);
12839 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12840 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12842 /* Check for a valid shift count. */
12843 if (TREE_INT_CST_HIGH (arg001) == 0
12844 && TREE_INT_CST_LOW (arg001) < prec)
12846 tree arg01 = TREE_OPERAND (arg0, 1);
12847 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12848 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12849 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12850 can be rewritten as (X & (C2 << C1)) != 0. */
12851 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12853 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12854 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12855 return fold_build2_loc (loc, code, type, tem,
12856 fold_convert_loc (loc, itype, arg1));
12858 /* Otherwise, for signed (arithmetic) shifts,
12859 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12860 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12861 else if (!TYPE_UNSIGNED (itype))
12862 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12863 arg000, build_int_cst (itype, 0));
12864 /* Otherwise, for unsigned (logical) shifts,
12865 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12866 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12867 else
12868 return omit_one_operand_loc (loc, type,
12869 code == EQ_EXPR ? integer_one_node
12870 : integer_zero_node,
12871 arg000);
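 /* For instance, at 32-bit precision "((X >> 2) & 4) != 0" becomes
 "(X & 16) != 0", while "((X >> 31) & 2) != 0" with signed X
 becomes simply "X < 0". */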
12875 /* If we have (A & C) == C where C is a power of 2, convert this into
12876 (A & C) != 0. Similarly for NE_EXPR. */
12877 if (TREE_CODE (arg0) == BIT_AND_EXPR
12878 && integer_pow2p (TREE_OPERAND (arg0, 1))
12879 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12880 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12881 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12882 integer_zero_node));
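 /* For instance, "(A & 8) == 8" folds to "(A & 8) != 0", the
 canonical form for single-bit tests. */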
12884 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12885 bit, then fold the expression into A < 0 or A >= 0. */
12886 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12887 if (tem)
12888 return tem;
12890 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12891 Similarly for NE_EXPR. */
12892 if (TREE_CODE (arg0) == BIT_AND_EXPR
12893 && TREE_CODE (arg1) == INTEGER_CST
12894 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12896 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12897 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12898 TREE_OPERAND (arg0, 1));
12899 tree dandnotc
12900 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12901 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12902 notc);
12903 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12904 if (integer_nonzerop (dandnotc))
12905 return omit_one_operand_loc (loc, type, rslt, arg0);
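 /* For instance, "(A & 12) == 1" can never hold, since the mask
 clears bit 0; the EQ form folds to 0 and the NE form to 1. */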
12908 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12909 Similarly for NE_EXPR. */
12910 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12911 && TREE_CODE (arg1) == INTEGER_CST
12912 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12914 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12915 tree candnotd
12916 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12917 TREE_OPERAND (arg0, 1),
12918 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12919 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12920 if (integer_nonzerop (candnotd))
12921 return omit_one_operand_loc (loc, type, rslt, arg0);
12924 /* If this is a comparison of a field, we may be able to simplify it. */
12925 if ((TREE_CODE (arg0) == COMPONENT_REF
12926 || TREE_CODE (arg0) == BIT_FIELD_REF)
12927 /* Handle the constant case even without -O
12928 to make sure the warnings are given. */
12929 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12931 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12932 if (t1)
12933 return t1;
12936 /* Optimize comparisons of strlen vs zero to a compare of the
12937 first character of the string vs zero. To wit,
12938 strlen(ptr) == 0 => *ptr == 0
12939 strlen(ptr) != 0 => *ptr != 0
12940 Other cases should reduce to one of these two (or a constant)
12941 due to the return value of strlen being unsigned. */
12942 if (TREE_CODE (arg0) == CALL_EXPR
12943 && integer_zerop (arg1))
12945 tree fndecl = get_callee_fndecl (arg0);
12947 if (fndecl
12948 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12949 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12950 && call_expr_nargs (arg0) == 1
12951 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12953 tree iref = build_fold_indirect_ref_loc (loc,
12954 CALL_EXPR_ARG (arg0, 0));
12955 return fold_build2_loc (loc, code, type, iref,
12956 build_int_cst (TREE_TYPE (iref), 0));
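 /* For instance, "strlen (p) == 0" folds to "*p == 0", avoiding the
 library call when only emptiness is tested. */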
12960 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12961 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12962 if (TREE_CODE (arg0) == RSHIFT_EXPR
12963 && integer_zerop (arg1)
12964 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12966 tree arg00 = TREE_OPERAND (arg0, 0);
12967 tree arg01 = TREE_OPERAND (arg0, 1);
12968 tree itype = TREE_TYPE (arg00);
12969 if (TREE_INT_CST_HIGH (arg01) == 0
12970 && TREE_INT_CST_LOW (arg01)
12971 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12973 if (TYPE_UNSIGNED (itype))
12975 itype = signed_type_for (itype);
12976 arg00 = fold_convert_loc (loc, itype, arg00);
12978 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12979 type, arg00, build_zero_cst (itype));
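 /* For instance, at 32-bit precision "(X >> 31) != 0" folds to
 "X < 0", casting X to the corresponding signed type first if it
 was unsigned. */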
12983 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12984 if (integer_zerop (arg1)
12985 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12986 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12987 TREE_OPERAND (arg0, 1));
12989 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12990 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12991 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12992 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12993 build_zero_cst (TREE_TYPE (arg0)));
12994 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12995 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12996 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12997 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12998 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12999 build_zero_cst (TREE_TYPE (arg0)));
13001 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13002 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13003 && TREE_CODE (arg1) == INTEGER_CST
13004 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13005 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13006 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13007 TREE_OPERAND (arg0, 1), arg1));
13009 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13010 (X & C) == 0 when C is a single bit. */
13011 if (TREE_CODE (arg0) == BIT_AND_EXPR
13012 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13013 && integer_zerop (arg1)
13014 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13016 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13017 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13018 TREE_OPERAND (arg0, 1));
13019 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13020 type, tem,
13021 fold_convert_loc (loc, TREE_TYPE (arg0),
13022 arg1));
13025 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13026 constant C is a power of two, i.e. a single bit. */
13027 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13028 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13029 && integer_zerop (arg1)
13030 && integer_pow2p (TREE_OPERAND (arg0, 1))
13031 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13032 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13034 tree arg00 = TREE_OPERAND (arg0, 0);
13035 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13036 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13039 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13040 when C is a power of two, i.e. a single bit. */
13041 if (TREE_CODE (arg0) == BIT_AND_EXPR
13042 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13043 && integer_zerop (arg1)
13044 && integer_pow2p (TREE_OPERAND (arg0, 1))
13045 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13046 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13048 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13049 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13050 arg000, TREE_OPERAND (arg0, 1));
13051 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13052 tem, build_int_cst (TREE_TYPE (tem), 0));
13055 if (integer_zerop (arg1)
13056 && tree_expr_nonzero_p (arg0))
13058 tree res = constant_boolean_node (code == NE_EXPR, type);
13059 return omit_one_operand_loc (loc, type, res, arg0);
13062 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13063 if (TREE_CODE (arg0) == NEGATE_EXPR
13064 && TREE_CODE (arg1) == NEGATE_EXPR)
13065 return fold_build2_loc (loc, code, type,
13066 TREE_OPERAND (arg0, 0),
13067 fold_convert_loc (loc, TREE_TYPE (arg0),
13068 TREE_OPERAND (arg1, 0)));
13070 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries. */
13071 if (TREE_CODE (arg0) == BIT_AND_EXPR
13072 && TREE_CODE (arg1) == BIT_AND_EXPR)
13074 tree arg00 = TREE_OPERAND (arg0, 0);
13075 tree arg01 = TREE_OPERAND (arg0, 1);
13076 tree arg10 = TREE_OPERAND (arg1, 0);
13077 tree arg11 = TREE_OPERAND (arg1, 1);
13078 tree itype = TREE_TYPE (arg0);
13080 if (operand_equal_p (arg01, arg11, 0))
13081 return fold_build2_loc (loc, code, type,
13082 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13083 fold_build2_loc (loc,
13084 BIT_XOR_EXPR, itype,
13085 arg00, arg10),
13086 arg01),
13087 build_zero_cst (itype));
13089 if (operand_equal_p (arg01, arg10, 0))
13090 return fold_build2_loc (loc, code, type,
13091 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13092 fold_build2_loc (loc,
13093 BIT_XOR_EXPR, itype,
13094 arg00, arg11),
13095 arg01),
13096 build_zero_cst (itype));
13098 if (operand_equal_p (arg00, arg11, 0))
13099 return fold_build2_loc (loc, code, type,
13100 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13101 fold_build2_loc (loc,
13102 BIT_XOR_EXPR, itype,
13103 arg01, arg10),
13104 arg00),
13105 build_zero_cst (itype));
13107 if (operand_equal_p (arg00, arg10, 0))
13108 return fold_build2_loc (loc, code, type,
13109 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13110 fold_build2_loc (loc,
13111 BIT_XOR_EXPR, itype,
13112 arg01, arg11),
13113 arg00),
13114 build_zero_cst (itype));
13117 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13118 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13120 tree arg00 = TREE_OPERAND (arg0, 0);
13121 tree arg01 = TREE_OPERAND (arg0, 1);
13122 tree arg10 = TREE_OPERAND (arg1, 0);
13123 tree arg11 = TREE_OPERAND (arg1, 1);
13124 tree itype = TREE_TYPE (arg0);
13126 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13127 operand_equal_p guarantees no side-effects so we don't need
13128 to use omit_one_operand on Z. */
13129 if (operand_equal_p (arg01, arg11, 0))
13130 return fold_build2_loc (loc, code, type, arg00,
13131 fold_convert_loc (loc, TREE_TYPE (arg00),
13132 arg10));
13133 if (operand_equal_p (arg01, arg10, 0))
13134 return fold_build2_loc (loc, code, type, arg00,
13135 fold_convert_loc (loc, TREE_TYPE (arg00),
13136 arg11));
13137 if (operand_equal_p (arg00, arg11, 0))
13138 return fold_build2_loc (loc, code, type, arg01,
13139 fold_convert_loc (loc, TREE_TYPE (arg01),
13140 arg10));
13141 if (operand_equal_p (arg00, arg10, 0))
13142 return fold_build2_loc (loc, code, type, arg01,
13143 fold_convert_loc (loc, TREE_TYPE (arg01),
13144 arg11));
13146 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13147 if (TREE_CODE (arg01) == INTEGER_CST
13148 && TREE_CODE (arg11) == INTEGER_CST)
13150 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13151 fold_convert_loc (loc, itype, arg11));
13152 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13153 return fold_build2_loc (loc, code, type, tem,
13154 fold_convert_loc (loc, itype, arg10));
13158 /* Attempt to simplify equality/inequality comparisons of complex
13159 values. Only lower the comparison if the result is known or
13160 can be simplified to a single scalar comparison. */
13161 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13162 || TREE_CODE (arg0) == COMPLEX_CST)
13163 && (TREE_CODE (arg1) == COMPLEX_EXPR
13164 || TREE_CODE (arg1) == COMPLEX_CST))
13166 tree real0, imag0, real1, imag1;
13167 tree rcond, icond;
13169 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13171 real0 = TREE_OPERAND (arg0, 0);
13172 imag0 = TREE_OPERAND (arg0, 1);
13174 else
13176 real0 = TREE_REALPART (arg0);
13177 imag0 = TREE_IMAGPART (arg0);
13180 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13182 real1 = TREE_OPERAND (arg1, 0);
13183 imag1 = TREE_OPERAND (arg1, 1);
13185 else
13187 real1 = TREE_REALPART (arg1);
13188 imag1 = TREE_IMAGPART (arg1);
13191 rcond = fold_binary_loc (loc, code, type, real0, real1);
13192 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13194 if (integer_zerop (rcond))
13196 if (code == EQ_EXPR)
13197 return omit_two_operands_loc (loc, type, boolean_false_node,
13198 imag0, imag1);
13199 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13201 else
13203 if (code == NE_EXPR)
13204 return omit_two_operands_loc (loc, type, boolean_true_node,
13205 imag0, imag1);
13206 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13210 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13211 if (icond && TREE_CODE (icond) == INTEGER_CST)
13213 if (integer_zerop (icond))
13215 if (code == EQ_EXPR)
13216 return omit_two_operands_loc (loc, type, boolean_false_node,
13217 real0, real1);
13218 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13220 else
13222 if (code == NE_EXPR)
13223 return omit_two_operands_loc (loc, type, boolean_true_node,
13224 real0, real1);
13225 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
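 /* For instance, if the real parts are unequal constants, a complex
 "x == y" folds to false while preserving any side effects in the
 imaginary parts, and "x != y" folds to true. */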
13230 return NULL_TREE;
13232 case LT_EXPR:
13233 case GT_EXPR:
13234 case LE_EXPR:
13235 case GE_EXPR:
13236 tem = fold_comparison (loc, code, type, op0, op1);
13237 if (tem != NULL_TREE)
13238 return tem;
13240 /* Transform comparisons of the form X +- C CMP X. */
13241 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13242 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13243 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13244 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13245 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13246 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13248 tree arg01 = TREE_OPERAND (arg0, 1);
13249 enum tree_code code0 = TREE_CODE (arg0);
13250 int is_positive;
13252 if (TREE_CODE (arg01) == REAL_CST)
13253 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13254 else
13255 is_positive = tree_int_cst_sgn (arg01);
13257 /* (X - c) > X becomes false. */
13258 if (code == GT_EXPR
13259 && ((code0 == MINUS_EXPR && is_positive >= 0)
13260 || (code0 == PLUS_EXPR && is_positive <= 0)))
13262 if (TREE_CODE (arg01) == INTEGER_CST
13263 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13264 fold_overflow_warning (("assuming signed overflow does not "
13265 "occur when assuming that (X - c) > X "
13266 "is always false"),
13267 WARN_STRICT_OVERFLOW_ALL);
13268 return constant_boolean_node (0, type);
13271 /* Likewise (X + c) < X becomes false. */
13272 if (code == LT_EXPR
13273 && ((code0 == PLUS_EXPR && is_positive >= 0)
13274 || (code0 == MINUS_EXPR && is_positive <= 0)))
13276 if (TREE_CODE (arg01) == INTEGER_CST
13277 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13278 fold_overflow_warning (("assuming signed overflow does not "
13279 "occur when assuming that "
13280 "(X + c) < X is always false"),
13281 WARN_STRICT_OVERFLOW_ALL);
13282 return constant_boolean_node (0, type);
13285 /* Convert (X - c) <= X to true. */
13286 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13287 && code == LE_EXPR
13288 && ((code0 == MINUS_EXPR && is_positive >= 0)
13289 || (code0 == PLUS_EXPR && is_positive <= 0)))
13291 if (TREE_CODE (arg01) == INTEGER_CST
13292 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13293 fold_overflow_warning (("assuming signed overflow does not "
13294 "occur when assuming that "
13295 "(X - c) <= X is always true"),
13296 WARN_STRICT_OVERFLOW_ALL);
13297 return constant_boolean_node (1, type);
13300 /* Convert (X + c) >= X to true. */
13301 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13302 && code == GE_EXPR
13303 && ((code0 == PLUS_EXPR && is_positive >= 0)
13304 || (code0 == MINUS_EXPR && is_positive <= 0)))
13306 if (TREE_CODE (arg01) == INTEGER_CST
13307 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13308 fold_overflow_warning (("assuming signed overflow does not "
13309 "occur when assuming that "
13310 "(X + c) >= X is always true"),
13311 WARN_STRICT_OVERFLOW_ALL);
13312 return constant_boolean_node (1, type);
13315 if (TREE_CODE (arg01) == INTEGER_CST)
13317 /* Convert X + c > X and X - c < X to true for integers. */
13318 if (code == GT_EXPR
13319 && ((code0 == PLUS_EXPR && is_positive > 0)
13320 || (code0 == MINUS_EXPR && is_positive < 0)))
13322 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13323 fold_overflow_warning (("assuming signed overflow does "
13324 "not occur when assuming that "
13325 "(X + c) > X is always true"),
13326 WARN_STRICT_OVERFLOW_ALL);
13327 return constant_boolean_node (1, type);
13330 if (code == LT_EXPR
13331 && ((code0 == MINUS_EXPR && is_positive > 0)
13332 || (code0 == PLUS_EXPR && is_positive < 0)))
13334 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13335 fold_overflow_warning (("assuming signed overflow does "
13336 "not occur when assuming that "
13337 "(X - c) < X is always true"),
13338 WARN_STRICT_OVERFLOW_ALL);
13339 return constant_boolean_node (1, type);
13342 /* Convert X + c <= X and X - c >= X to false for integers. */
13343 if (code == LE_EXPR
13344 && ((code0 == PLUS_EXPR && is_positive > 0)
13345 || (code0 == MINUS_EXPR && is_positive < 0)))
13347 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13348 fold_overflow_warning (("assuming signed overflow does "
13349 "not occur when assuming that "
13350 "(X + c) <= X is always false"),
13351 WARN_STRICT_OVERFLOW_ALL);
13352 return constant_boolean_node (0, type);
13355 if (code == GE_EXPR
13356 && ((code0 == MINUS_EXPR && is_positive > 0)
13357 || (code0 == PLUS_EXPR && is_positive < 0)))
13359 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13360 fold_overflow_warning (("assuming signed overflow does "
13361 "not occur when assuming that "
13362 "(X - c) >= X is always false"),
13363 WARN_STRICT_OVERFLOW_ALL);
13364 return constant_boolean_node (0, type);
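 /* For instance, with signed X and overflow undefined, "X + 1 > X"
 folds to true and "X - 1 >= X" folds to false, since wrap-around
 is assumed not to happen. */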
13369 /* Comparisons with the highest or lowest possible integer of
13370 the specified precision will have known values. */
13372 tree arg1_type = TREE_TYPE (arg1);
13373 unsigned int width = TYPE_PRECISION (arg1_type);
13375 if (TREE_CODE (arg1) == INTEGER_CST
13376 && width <= HOST_BITS_PER_DOUBLE_INT
13377 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13379 HOST_WIDE_INT signed_max_hi;
13380 unsigned HOST_WIDE_INT signed_max_lo;
13381 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13383 if (width <= HOST_BITS_PER_WIDE_INT)
13385 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13386 - 1;
13387 signed_max_hi = 0;
13388 max_hi = 0;
13390 if (TYPE_UNSIGNED (arg1_type))
13392 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13393 min_lo = 0;
13394 min_hi = 0;
13396 else
13398 max_lo = signed_max_lo;
13399 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13400 min_hi = -1;
13403 else
13405 width -= HOST_BITS_PER_WIDE_INT;
13406 signed_max_lo = -1;
13407 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13408 - 1;
13409 max_lo = -1;
13410 min_lo = 0;
13412 if (TYPE_UNSIGNED (arg1_type))
13414 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13415 min_hi = 0;
13417 else
13419 max_hi = signed_max_hi;
13420 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13424 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13425 && TREE_INT_CST_LOW (arg1) == max_lo)
13426 switch (code)
13428 case GT_EXPR:
13429 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13431 case GE_EXPR:
13432 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13434 case LE_EXPR:
13435 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13437 case LT_EXPR:
13438 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13440 /* The GE_EXPR and LT_EXPR cases above are not normally
13441 reached because of previous transformations. */
13443 default:
13444 break;
13446 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13447 == max_hi
13448 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13449 switch (code)
13451 case GT_EXPR:
13452 arg1 = const_binop (PLUS_EXPR, arg1,
13453 build_int_cst (TREE_TYPE (arg1), 1));
13454 return fold_build2_loc (loc, EQ_EXPR, type,
13455 fold_convert_loc (loc,
13456 TREE_TYPE (arg1), arg0),
13457 arg1);
13458 case LE_EXPR:
13459 arg1 = const_binop (PLUS_EXPR, arg1,
13460 build_int_cst (TREE_TYPE (arg1), 1));
13461 return fold_build2_loc (loc, NE_EXPR, type,
13462 fold_convert_loc (loc, TREE_TYPE (arg1),
13463 arg0),
13464 arg1);
13465 default:
13466 break;
13468 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13469 == min_hi
13470 && TREE_INT_CST_LOW (arg1) == min_lo)
13471 switch (code)
13473 case LT_EXPR:
13474 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13476 case LE_EXPR:
13477 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13479 case GE_EXPR:
13480 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13482 case GT_EXPR:
13483 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13485 default:
13486 break;
13488 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13489 == min_hi
13490 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13491 switch (code)
13493 case GE_EXPR:
13494 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13495 return fold_build2_loc (loc, NE_EXPR, type,
13496 fold_convert_loc (loc,
13497 TREE_TYPE (arg1), arg0),
13498 arg1);
13499 case LT_EXPR:
13500 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13501 return fold_build2_loc (loc, EQ_EXPR, type,
13502 fold_convert_loc (loc, TREE_TYPE (arg1),
13503 arg0),
13504 arg1);
13505 default:
13506 break;
13509 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13510 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13511 && TYPE_UNSIGNED (arg1_type)
13512 /* We will flip the signedness of the comparison operator
13513 associated with the mode of arg1, so the sign bit is
13514 specified by this mode. Check that arg1 is the signed
13515 max associated with this sign bit. */
13516 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13517 /* signed_type does not work on pointer types. */
13518 && INTEGRAL_TYPE_P (arg1_type))
13520 /* The following case also applies to X < signed_max+1
13521 and X >= signed_max+1 because of previous transformations. */
13522 if (code == LE_EXPR || code == GT_EXPR)
13524 tree st;
13525 st = signed_type_for (TREE_TYPE (arg1));
13526 return fold_build2_loc (loc,
13527 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13528 type, fold_convert_loc (loc, st, arg0),
13529 build_int_cst (st, 0));
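 /* For instance, with 32-bit unsigned X, "X > 0x7fffffff" folds to
 "(int) X < 0", a direct test of the sign bit. */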
13535 /* If we are comparing an ABS_EXPR with a constant, we can
13536 convert all the cases into explicit comparisons, but they may
13537 well not be faster than doing the ABS and one comparison.
13538 But ABS (X) <= C is a range comparison, which becomes a subtraction
13539 and a comparison, and is probably faster. */
13540 if (code == LE_EXPR
13541 && TREE_CODE (arg1) == INTEGER_CST
13542 && TREE_CODE (arg0) == ABS_EXPR
13543 && ! TREE_SIDE_EFFECTS (arg0)
13544 && (0 != (tem = negate_expr (arg1)))
13545 && TREE_CODE (tem) == INTEGER_CST
13546 && !TREE_OVERFLOW (tem))
13547 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13548 build2 (GE_EXPR, type,
13549 TREE_OPERAND (arg0, 0), tem),
13550 build2 (LE_EXPR, type,
13551 TREE_OPERAND (arg0, 0), arg1));
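 /* For instance, "ABS (X) <= 7" folds to "X >= -7 && X <= 7", which
 the range-comparison machinery can then turn into a single
 subtraction and unsigned compare. */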
13553 /* Convert ABS_EXPR<x> >= 0 to true. */
13554 strict_overflow_p = false;
13555 if (code == GE_EXPR
13556 && (integer_zerop (arg1)
13557 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13558 && real_zerop (arg1)))
13559 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13561 if (strict_overflow_p)
13562 fold_overflow_warning (("assuming signed overflow does not occur "
13563 "when simplifying comparison of "
13564 "absolute value and zero"),
13565 WARN_STRICT_OVERFLOW_CONDITIONAL);
13566 return omit_one_operand_loc (loc, type,
13567 constant_boolean_node (true, type),
13568 arg0);
13571 /* Convert ABS_EXPR<x> < 0 to false. */
13572 strict_overflow_p = false;
13573 if (code == LT_EXPR
13574 && (integer_zerop (arg1) || real_zerop (arg1))
13575 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13577 if (strict_overflow_p)
13578 fold_overflow_warning (("assuming signed overflow does not occur "
13579 "when simplifying comparison of "
13580 "absolute value and zero"),
13581 WARN_STRICT_OVERFLOW_CONDITIONAL);
13582 return omit_one_operand_loc (loc, type,
13583 constant_boolean_node (false, type),
13584 arg0);
13587 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13588 and similarly for >= into !=. */
13589 if ((code == LT_EXPR || code == GE_EXPR)
13590 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13591 && TREE_CODE (arg1) == LSHIFT_EXPR
13592 && integer_onep (TREE_OPERAND (arg1, 0)))
13593 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13594 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13595 TREE_OPERAND (arg1, 1)),
13596 build_zero_cst (TREE_TYPE (arg0)));
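 /* For instance, with unsigned X, "X < (1 << Y)" folds to
 "(X >> Y) == 0": X is below 1 << Y exactly when shifting out the
 low Y bits leaves nothing. */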
13598 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13599 otherwise Y might be >= # of bits in X's type and thus e.g.
13600 (unsigned char) (1 << Y) for Y == 15 might be 0.
13601 If the cast is widening, then 1 << Y should have unsigned type,
13602 otherwise if Y is the number of bits in the signed shift type minus 1,
13603 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13604 Y == 31 might be 0xffffffff80000000. */
13605 if ((code == LT_EXPR || code == GE_EXPR)
13606 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13607 && CONVERT_EXPR_P (arg1)
13608 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13609 && (TYPE_PRECISION (TREE_TYPE (arg1))
13610 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13611 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13612 || (TYPE_PRECISION (TREE_TYPE (arg1))
13613 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13614 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13616 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13617 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13618 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13619 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13620 build_zero_cst (TREE_TYPE (arg0)));
13623 return NULL_TREE;
13625 case UNORDERED_EXPR:
13626 case ORDERED_EXPR:
13627 case UNLT_EXPR:
13628 case UNLE_EXPR:
13629 case UNGT_EXPR:
13630 case UNGE_EXPR:
13631 case UNEQ_EXPR:
13632 case LTGT_EXPR:
13633 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13635 t1 = fold_relational_const (code, type, arg0, arg1);
13636 if (t1 != NULL_TREE)
13637 return t1;
13640 /* If the first operand is NaN, the result is constant. */
13641 if (TREE_CODE (arg0) == REAL_CST
13642 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13643 && (code != LTGT_EXPR || ! flag_trapping_math))
13645 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13646 ? integer_zero_node
13647 : integer_one_node;
13648 return omit_one_operand_loc (loc, type, t1, arg1);
13651 /* If the second operand is NaN, the result is constant. */
13652 if (TREE_CODE (arg1) == REAL_CST
13653 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13654 && (code != LTGT_EXPR || ! flag_trapping_math))
13656 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13657 ? integer_zero_node
13658 : integer_one_node;
13659 return omit_one_operand_loc (loc, type, t1, arg0);
13662 /* Simplify unordered comparison of something with itself. */
13663 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13664 && operand_equal_p (arg0, arg1, 0))
13665 return constant_boolean_node (1, type);
13667 if (code == LTGT_EXPR
13668 && !flag_trapping_math
13669 && operand_equal_p (arg0, arg1, 0))
13670 return constant_boolean_node (0, type);
13672 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13674 tree targ0 = strip_float_extensions (arg0);
13675 tree targ1 = strip_float_extensions (arg1);
13676 tree newtype = TREE_TYPE (targ0);
13678 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13679 newtype = TREE_TYPE (targ1);
13681 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13682 return fold_build2_loc (loc, code, type,
13683 fold_convert_loc (loc, newtype, targ0),
13684 fold_convert_loc (loc, newtype, targ1));
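 /* For instance, "(double) f1 > (double) f2" with float operands
 folds to "f1 > f2": the widening conversions are exact, so the
 narrower comparison gives the same result. */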
13687 return NULL_TREE;
13689 case COMPOUND_EXPR:
13690 /* When pedantic, a compound expression can be neither an lvalue
13691 nor an integer constant expression. */
13692 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13693 return NULL_TREE;
13694 /* Don't let (0, 0) be a null pointer constant. */
13695 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13696 : fold_convert_loc (loc, type, arg1);
13697 return pedantic_non_lvalue_loc (loc, tem);
13699 case COMPLEX_EXPR:
13700 if ((TREE_CODE (arg0) == REAL_CST
13701 && TREE_CODE (arg1) == REAL_CST)
13702 || (TREE_CODE (arg0) == INTEGER_CST
13703 && TREE_CODE (arg1) == INTEGER_CST))
13704 return build_complex (type, arg0, arg1);
13705 if (TREE_CODE (arg0) == REALPART_EXPR
13706 && TREE_CODE (arg1) == IMAGPART_EXPR
13707 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13708 && operand_equal_p (TREE_OPERAND (arg0, 0),
13709 TREE_OPERAND (arg1, 0), 0))
13710 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13711 TREE_OPERAND (arg1, 0));
13712 return NULL_TREE;
13714 case ASSERT_EXPR:
13715 /* An ASSERT_EXPR should never be passed to fold_binary. */
13716 gcc_unreachable ();
13718 case VEC_PACK_TRUNC_EXPR:
13719 case VEC_PACK_FIX_TRUNC_EXPR:
13721 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13722 tree *elts;
13724 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13725 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13726 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13727 return NULL_TREE;
13729 elts = XALLOCAVEC (tree, nelts);
13730 if (!vec_cst_ctor_to_array (arg0, elts)
13731 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13732 return NULL_TREE;
13734 for (i = 0; i < nelts; i++)
13736 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13737 ? NOP_EXPR : FIX_TRUNC_EXPR,
13738 TREE_TYPE (type), elts[i]);
13739 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13740 return NULL_TREE;
13743 return build_vector (type, elts);
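 /* For instance, packing the V2SI constants {1, 2} and {3, 4} with
 VEC_PACK_TRUNC_EXPR into a V4HI yields {1, 2, 3, 4}, each element
 truncated to the narrower element type. */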
13746 case VEC_WIDEN_MULT_LO_EXPR:
13747 case VEC_WIDEN_MULT_HI_EXPR:
13748 case VEC_WIDEN_MULT_EVEN_EXPR:
13749 case VEC_WIDEN_MULT_ODD_EXPR:
13751 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13752 unsigned int out, ofs, scale;
13753 tree *elts;
13755 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13756 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13757 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13758 return NULL_TREE;
13760 elts = XALLOCAVEC (tree, nelts * 4);
13761 if (!vec_cst_ctor_to_array (arg0, elts)
13762 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13763 return NULL_TREE;
13765 if (code == VEC_WIDEN_MULT_LO_EXPR)
13766 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13767 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13768 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13769 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13770 scale = 1, ofs = 0;
13771 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13772 scale = 1, ofs = 1;
13774 for (out = 0; out < nelts; out++)
13776 unsigned int in1 = (out << scale) + ofs;
13777 unsigned int in2 = in1 + nelts * 2;
13778 tree t1, t2;
13780 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13781 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13783 if (t1 == NULL_TREE || t2 == NULL_TREE)
13784 return NULL_TREE;
13785 elts[out] = const_binop (MULT_EXPR, t1, t2);
13786 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13787 return NULL_TREE;
13790 return build_vector (type, elts);
13793 default:
13794 return NULL_TREE;
13795 } /* switch (code) */
13798 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13799 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13800 of GOTO_EXPR. */
13802 static tree
13803 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13805 switch (TREE_CODE (*tp))
13807 case LABEL_EXPR:
13808 return *tp;
13810 case GOTO_EXPR:
13811 *walk_subtrees = 0;
13813 /* ... fall through ... */
13815 default:
13816 return NULL_TREE;
13820 /* Return whether the sub-tree ST contains a label which is accessible from
13821 outside the sub-tree. */
13823 static bool
13824 contains_label_p (tree st)
13826 return
13827 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13830 /* Fold a ternary expression of code CODE and type TYPE with operands
13831 OP0, OP1, and OP2. Return the folded expression if folding is
13832 successful. Otherwise, return NULL_TREE. */
13834 tree
13835 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13836 tree op0, tree op1, tree op2)
13838 tree tem;
13839 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13840 enum tree_code_class kind = TREE_CODE_CLASS (code);
13842 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13843 && TREE_CODE_LENGTH (code) == 3);
13845 /* Strip any conversions that don't change the mode. This is safe
13846 for every expression, except for a comparison expression because
13847 its signedness is derived from its operands. So, in the latter
13848 case, only strip conversions that don't change the signedness.
13850 Note that this is done as an internal manipulation within the
13851 constant folder, in order to find the simplest representation of
13852 the arguments so that their form can be studied. In any case,
13853 the appropriate type conversions should be put back in the tree
13854 that will get out of the constant folder. */
13855 if (op0)
13857 arg0 = op0;
13858 STRIP_NOPS (arg0);
13861 if (op1)
13863 arg1 = op1;
13864 STRIP_NOPS (arg1);
13867 if (op2)
13869 arg2 = op2;
13870 STRIP_NOPS (arg2);
13873 switch (code)
13875 case COMPONENT_REF:
13876 if (TREE_CODE (arg0) == CONSTRUCTOR
13877 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13879 unsigned HOST_WIDE_INT idx;
13880 tree field, value;
13881 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13882 if (field == arg1)
13883 return value;
13885 return NULL_TREE;
13887 case COND_EXPR:
13888 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13889 so all simple results must be passed through pedantic_non_lvalue. */
13890 if (TREE_CODE (arg0) == INTEGER_CST)
13892 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13893 tem = integer_zerop (arg0) ? op2 : op1;
13894 /* Only optimize constant conditions when the selected branch
13895 has the same type as the COND_EXPR. This avoids optimizing
13896 away "c ? x : throw", where the throw has a void type.
13897 Avoid throwing away the operand that contains a label. */
13898 if ((!TREE_SIDE_EFFECTS (unused_op)
13899 || !contains_label_p (unused_op))
13900 && (! VOID_TYPE_P (TREE_TYPE (tem))
13901 || VOID_TYPE_P (type)))
13902 return pedantic_non_lvalue_loc (loc, tem);
13903 return NULL_TREE;
13905 if (operand_equal_p (arg1, op2, 0))
13906 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13908 /* If we have A op B ? A : C, we may be able to convert this to a
13909 simpler expression, depending on the operation and the values
13910 of B and C. Signed zeros prevent all of these transformations,
13911 for reasons given above each one.
13913 Also try swapping the arguments and inverting the conditional. */
13914 if (COMPARISON_CLASS_P (arg0)
13915 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13916 arg1, TREE_OPERAND (arg0, 1))
13917 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13919 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13920 if (tem)
13921 return tem;
13924 if (COMPARISON_CLASS_P (arg0)
13925 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13926 op2,
13927 TREE_OPERAND (arg0, 1))
13928 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13930 location_t loc0 = expr_location_or (arg0, loc);
13931 tem = fold_truth_not_expr (loc0, arg0);
13932 if (tem && COMPARISON_CLASS_P (tem))
13934 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13935 if (tem)
13936 return tem;
13940 /* If the second operand is simpler than the third, swap them
13941 since that produces better jump optimization results. */
13942 if (truth_value_p (TREE_CODE (arg0))
13943 && tree_swap_operands_p (op1, op2, false))
13945 location_t loc0 = expr_location_or (arg0, loc);
13946 /* See if this can be inverted. If it can't, possibly because
13947 it was a floating-point inequality comparison, don't do
13948 anything. */
13949 tem = fold_truth_not_expr (loc0, arg0);
13950 if (tem)
13951 return fold_build3_loc (loc, code, type, tem, op2, op1);
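/* E.g. "a < b ? x : y" with floating-point operands must not blindly
   become "a >= b ? y : x": if either operand is a NaN, both comparisons
   are false.  fold_truth_not_expr only inverts comparisons it can invert
   safely and returns NULL_TREE otherwise, so in that case the expression
   is left alone.  */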
13954 /* Convert A ? 1 : 0 to simply A. */
13955 if (integer_onep (op1)
13956 && integer_zerop (op2)
13957 /* If we try to convert OP0 to our type, the
13958 call to fold will try to move the conversion inside
13959 a COND, which will recurse. In that case, the COND_EXPR
13960 is probably the best choice, so leave it alone. */
13961 && type == TREE_TYPE (arg0))
13962 return pedantic_non_lvalue_loc (loc, arg0);
13964 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13965 over COND_EXPR in cases such as floating point comparisons. */
13966 if (integer_zerop (op1)
13967 && integer_onep (op2)
13968 && truth_value_p (TREE_CODE (arg0)))
13969 return pedantic_non_lvalue_loc (loc,
13970 fold_convert_loc (loc, type,
13971 invert_truthvalue_loc (loc,
13972 arg0)));
13974 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13975 if (TREE_CODE (arg0) == LT_EXPR
13976 && integer_zerop (TREE_OPERAND (arg0, 1))
13977 && integer_zerop (op2)
13978 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13980 /* sign_bit_p only checks ARG1 bits within A's precision.
13981 If <sign bit of A> has a wider type than A, bits outside
13982 of A's precision in <sign bit of A> need to be checked.
13983 If they are all 0, this optimization must be done
13984 in A's unsigned type; if they are all 1, in A's signed type;
13985 otherwise it can't be done. */
13986 if (TYPE_PRECISION (TREE_TYPE (tem))
13987 < TYPE_PRECISION (TREE_TYPE (arg1))
13988 && TYPE_PRECISION (TREE_TYPE (tem))
13989 < TYPE_PRECISION (type))
13991 unsigned HOST_WIDE_INT mask_lo;
13992 HOST_WIDE_INT mask_hi;
13993 int inner_width, outer_width;
13994 tree tem_type;
13996 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13997 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13998 if (outer_width > TYPE_PRECISION (type))
13999 outer_width = TYPE_PRECISION (type);
14001 if (outer_width > HOST_BITS_PER_WIDE_INT)
14003 mask_hi = ((unsigned HOST_WIDE_INT) -1
14004 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14005 mask_lo = -1;
14007 else
14009 mask_hi = 0;
14010 mask_lo = ((unsigned HOST_WIDE_INT) -1
14011 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14013 if (inner_width > HOST_BITS_PER_WIDE_INT)
14015 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
14016 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14017 mask_lo = 0;
14019 else
14020 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
14021 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14023 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14024 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14026 tem_type = signed_type_for (TREE_TYPE (tem));
14027 tem = fold_convert_loc (loc, tem_type, tem);
14029 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14030 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14032 tem_type = unsigned_type_for (TREE_TYPE (tem));
14033 tem = fold_convert_loc (loc, tem_type, tem);
14035 else
14036 tem = NULL;
14039 if (tem)
14040 return
14041 fold_convert_loc (loc, type,
14042 fold_build2_loc (loc, BIT_AND_EXPR,
14043 TREE_TYPE (tem), tem,
14044 fold_convert_loc (loc,
14045 TREE_TYPE (tem),
14046 arg1)));
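/* A worked instance of the above: for signed char A, in
   "A < 0 ? 128 : 0" computed in int, the bits of 128 beyond A's 8-bit
   precision are all zero, so TEM is converted to unsigned char and the
   result is ((unsigned char) A & 128) converted back to int.  */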
14049 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14050 already handled above. */
14051 if (TREE_CODE (arg0) == BIT_AND_EXPR
14052 && integer_onep (TREE_OPERAND (arg0, 1))
14053 && integer_zerop (op2)
14054 && integer_pow2p (arg1))
14056 tree tem = TREE_OPERAND (arg0, 0);
14057 STRIP_NOPS (tem);
14058 if (TREE_CODE (tem) == RSHIFT_EXPR
14059 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14060 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14061 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14062 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14063 TREE_OPERAND (tem, 0), arg1);
14066 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14067 is probably obsolete because the first operand should be a
14068 truth value (that's why we have the two cases above), but let's
14069 leave it in until we can confirm this for all front-ends. */
14070 if (integer_zerop (op2)
14071 && TREE_CODE (arg0) == NE_EXPR
14072 && integer_zerop (TREE_OPERAND (arg0, 1))
14073 && integer_pow2p (arg1)
14074 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14075 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14076 arg1, OEP_ONLY_CONST))
14077 return pedantic_non_lvalue_loc (loc,
14078 fold_convert_loc (loc, type,
14079 TREE_OPERAND (arg0, 0)));
14081 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14082 if (integer_zerop (op2)
14083 && truth_value_p (TREE_CODE (arg0))
14084 && truth_value_p (TREE_CODE (arg1)))
14085 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14086 fold_convert_loc (loc, type, arg0),
14087 arg1);
14089 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14090 if (integer_onep (op2)
14091 && truth_value_p (TREE_CODE (arg0))
14092 && truth_value_p (TREE_CODE (arg1)))
14094 location_t loc0 = expr_location_or (arg0, loc);
14095 /* Only perform transformation if ARG0 is easily inverted. */
14096 tem = fold_truth_not_expr (loc0, arg0);
14097 if (tem)
14098 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14099 fold_convert_loc (loc, type, tem),
14100 arg1);
14103 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14104 if (integer_zerop (arg1)
14105 && truth_value_p (TREE_CODE (arg0))
14106 && truth_value_p (TREE_CODE (op2)))
14108 location_t loc0 = expr_location_or (arg0, loc);
14109 /* Only perform transformation if ARG0 is easily inverted. */
14110 tem = fold_truth_not_expr (loc0, arg0);
14111 if (tem)
14112 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14113 fold_convert_loc (loc, type, tem),
14114 op2);
14117 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14118 if (integer_onep (arg1)
14119 && truth_value_p (TREE_CODE (arg0))
14120 && truth_value_p (TREE_CODE (op2)))
14121 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14122 fold_convert_loc (loc, type, arg0),
14123 op2);
14125 return NULL_TREE;
14127 case VEC_COND_EXPR:
14128 if (TREE_CODE (arg0) == VECTOR_CST)
14130 if (integer_all_onesp (arg0) && !TREE_SIDE_EFFECTS (op2))
14131 return pedantic_non_lvalue_loc (loc, op1);
14132 if (integer_zerop (arg0) && !TREE_SIDE_EFFECTS (op1))
14133 return pedantic_non_lvalue_loc (loc, op2);
14135 return NULL_TREE;
14137 case CALL_EXPR:
14138 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14139 of fold_ternary on them. */
14140 gcc_unreachable ();
14142 case BIT_FIELD_REF:
14143 if ((TREE_CODE (arg0) == VECTOR_CST
14144 || (TREE_CODE (arg0) == CONSTRUCTOR
14145 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14146 && (type == TREE_TYPE (TREE_TYPE (arg0))
14147 || (TREE_CODE (type) == VECTOR_TYPE
14148 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14150 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14151 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14152 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14153 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14155 if (n != 0
14156 && (idx % width) == 0
14157 && (n % width) == 0
14158 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14160 idx = idx / width;
14161 n = n / width;
14163 if (TREE_CODE (arg0) == VECTOR_CST)
14165 if (n == 1)
14166 return VECTOR_CST_ELT (arg0, idx);
14168 tree *vals = XALLOCAVEC (tree, n);
14169 for (unsigned i = 0; i < n; ++i)
14170 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14171 return build_vector (type, vals);
14174 /* Constructor elements can be subvectors. */
14175 unsigned HOST_WIDE_INT k = 1;
14176 if (CONSTRUCTOR_NELTS (arg0) != 0)
14178 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14179 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14180 k = TYPE_VECTOR_SUBPARTS (cons_elem);
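/* E.g. a V8HI constructor whose elements are two V4HI vectors gives
   K == 4, so a BIT_FIELD_REF selecting lanes 4..7 maps exactly onto
   the second constructor element.  */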
14183 /* We keep an exact subset of the constructor elements. */
14184 if ((idx % k) == 0 && (n % k) == 0)
14186 if (CONSTRUCTOR_NELTS (arg0) == 0)
14187 return build_constructor (type, NULL);
14188 idx /= k;
14189 n /= k;
14190 if (n == 1)
14192 if (idx < CONSTRUCTOR_NELTS (arg0))
14193 return CONSTRUCTOR_ELT (arg0, idx)->value;
14194 return build_zero_cst (type);
14197 vec<constructor_elt, va_gc> *vals;
14198 vec_alloc (vals, n);
14199 for (unsigned i = 0;
14200 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14201 ++i)
14202 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14203 CONSTRUCTOR_ELT
14204 (arg0, idx + i)->value);
14205 return build_constructor (type, vals);
14207 /* The bitfield references a single constructor element. */
14208 else if (idx + n <= (idx / k + 1) * k)
14210 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14211 return build_zero_cst (type);
14212 else if (n == k)
14213 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14214 else
14215 return fold_build3_loc (loc, code, type,
14216 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14217 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14222 /* A bit-field-ref that referenced the full argument can be stripped. */
14223 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14224 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14225 && integer_zerop (op2))
14226 return fold_convert_loc (loc, type, arg0);
14228 /* On constants we can use native encode/interpret to constant
14229 fold (nearly) all BIT_FIELD_REFs. */
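/* For example, a BIT_FIELD_REF reading bits 32..63 of a 64-bit
   INTEGER_CST can be folded by serializing the constant with
   native_encode_expr and re-reading the selected bytes with
   native_interpret_expr, provided position and size are byte-aligned
   as checked below.  */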
14230 if (CONSTANT_CLASS_P (arg0)
14231 && can_native_interpret_type_p (type)
14232 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14233 /* This limitation should not be necessary; we just need to
14234 round this up to the mode size. */
14235 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14236 /* Need bit-shifting of the buffer to relax the following. */
14237 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14239 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14240 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14241 unsigned HOST_WIDE_INT clen;
14242 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14243 /* ??? We cannot tell native_encode_expr to start at
14244 some random byte only. So limit us to a reasonable amount
14245 of work. */
14246 if (clen <= 4096)
14248 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14249 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14250 if (len > 0
14251 && len * BITS_PER_UNIT >= bitpos + bitsize)
14253 tree v = native_interpret_expr (type,
14254 b + bitpos / BITS_PER_UNIT,
14255 bitsize / BITS_PER_UNIT);
14256 if (v)
14257 return v;
14262 return NULL_TREE;
14264 case FMA_EXPR:
14265 /* For integers we can decompose the FMA if possible. */
14266 if (TREE_CODE (arg0) == INTEGER_CST
14267 && TREE_CODE (arg1) == INTEGER_CST)
14268 return fold_build2_loc (loc, PLUS_EXPR, type,
14269 const_binop (MULT_EXPR, arg0, arg1), arg2);
14270 if (integer_zerop (arg2))
14271 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14273 return fold_fma (loc, type, arg0, arg1, arg2);
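/* E.g. FMA_EXPR <6, 7, z> with integer operands becomes 42 + z through
   the PLUS_EXPR path above, and FMA_EXPR <x, y, 0> degenerates to
   x * y.  */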
14275 case VEC_PERM_EXPR:
14276 if (TREE_CODE (arg2) == VECTOR_CST)
14278 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14279 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14280 tree t;
14281 bool need_mask_canon = false;
14282 bool all_in_vec0 = true;
14283 bool all_in_vec1 = true;
14284 bool maybe_identity = true;
14285 bool single_arg = (op0 == op1);
14286 bool changed = false;
14288 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14289 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14290 for (i = 0; i < nelts; i++)
14292 tree val = VECTOR_CST_ELT (arg2, i);
14293 if (TREE_CODE (val) != INTEGER_CST)
14294 return NULL_TREE;
14296 sel[i] = TREE_INT_CST_LOW (val) & mask;
14297 if (TREE_INT_CST_HIGH (val)
14298 || ((unsigned HOST_WIDE_INT)
14299 TREE_INT_CST_LOW (val) != sel[i]))
14300 need_mask_canon = true;
14302 if (sel[i] < nelts)
14303 all_in_vec1 = false;
14304 else
14305 all_in_vec0 = false;
14307 if ((sel[i] & (nelts-1)) != i)
14308 maybe_identity = false;
14311 if (maybe_identity)
14313 if (all_in_vec0)
14314 return op0;
14315 if (all_in_vec1)
14316 return op1;
14319 if (all_in_vec0)
14320 op1 = op0;
14321 else if (all_in_vec1)
14323 op0 = op1;
14324 for (i = 0; i < nelts; i++)
14325 sel[i] -= nelts;
14326 need_mask_canon = true;
14329 if ((TREE_CODE (op0) == VECTOR_CST
14330 || TREE_CODE (op0) == CONSTRUCTOR)
14331 && (TREE_CODE (op1) == VECTOR_CST
14332 || TREE_CODE (op1) == CONSTRUCTOR))
14334 t = fold_vec_perm (type, op0, op1, sel);
14335 if (t != NULL_TREE)
14336 return t;
14339 if (op0 == op1 && !single_arg)
14340 changed = true;
14342 if (need_mask_canon && arg2 == op2)
14344 tree *tsel = XALLOCAVEC (tree, nelts);
14345 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14346 for (i = 0; i < nelts; i++)
14347 tsel[i] = build_int_cst (eltype, sel[i]);
14348 op2 = build_vector (TREE_TYPE (arg2), tsel);
14349 changed = true;
14352 if (changed)
14353 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14355 return NULL_TREE;
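/* An illustrative run with nelts == 4: the selector { 4, 5, 6, 7 }
   picks exactly the lanes of the second vector in order, so OP1 is
   returned outright; the selector { 5, 4, 7, 6 } still uses only the
   second vector, so OP0 is replaced by OP1 and the mask is
   canonicalized to { 1, 0, 3, 2 } over a single operand.  */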
14357 default:
14358 return NULL_TREE;
14359 } /* switch (code) */
14362 /* Perform constant folding and related simplification of EXPR.
14363 The related simplifications include x*1 => x, x*0 => 0, etc.,
14364 and application of the associative law.
14365 NOP_EXPR conversions may be removed freely (as long as we
14366 are careful not to change the type of the overall expression).
14367 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14368 but we can constant-fold them if they have constant operands. */
14370 #ifdef ENABLE_FOLD_CHECKING
14371 # define fold(x) fold_1 (x)
14372 static tree fold_1 (tree);
14373 static
14374 #endif
14375 tree
14376 fold (tree expr)
14378 const tree t = expr;
14379 enum tree_code code = TREE_CODE (t);
14380 enum tree_code_class kind = TREE_CODE_CLASS (code);
14381 tree tem;
14382 location_t loc = EXPR_LOCATION (expr);
14384 /* Return right away if a constant. */
14385 if (kind == tcc_constant)
14386 return t;
14388 /* CALL_EXPR-like objects with variable numbers of operands are
14389 treated specially. */
14390 if (kind == tcc_vl_exp)
14392 if (code == CALL_EXPR)
14394 tem = fold_call_expr (loc, expr, false);
14395 return tem ? tem : expr;
14397 return expr;
14400 if (IS_EXPR_CODE_CLASS (kind))
14402 tree type = TREE_TYPE (t);
14403 tree op0, op1, op2;
14405 switch (TREE_CODE_LENGTH (code))
14407 case 1:
14408 op0 = TREE_OPERAND (t, 0);
14409 tem = fold_unary_loc (loc, code, type, op0);
14410 return tem ? tem : expr;
14411 case 2:
14412 op0 = TREE_OPERAND (t, 0);
14413 op1 = TREE_OPERAND (t, 1);
14414 tem = fold_binary_loc (loc, code, type, op0, op1);
14415 return tem ? tem : expr;
14416 case 3:
14417 op0 = TREE_OPERAND (t, 0);
14418 op1 = TREE_OPERAND (t, 1);
14419 op2 = TREE_OPERAND (t, 2);
14420 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14421 return tem ? tem : expr;
14422 default:
14423 break;
14427 switch (code)
14429 case ARRAY_REF:
14431 tree op0 = TREE_OPERAND (t, 0);
14432 tree op1 = TREE_OPERAND (t, 1);
14434 if (TREE_CODE (op1) == INTEGER_CST
14435 && TREE_CODE (op0) == CONSTRUCTOR
14436 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14438 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14439 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14440 unsigned HOST_WIDE_INT begin = 0;
14442 /* Find a matching index by means of a binary search. */
14443 while (begin != end)
14445 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14446 tree index = (*elts)[middle].index;
14448 if (TREE_CODE (index) == INTEGER_CST
14449 && tree_int_cst_lt (index, op1))
14450 begin = middle + 1;
14451 else if (TREE_CODE (index) == INTEGER_CST
14452 && tree_int_cst_lt (op1, index))
14453 end = middle;
14454 else if (TREE_CODE (index) == RANGE_EXPR
14455 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14456 begin = middle + 1;
14457 else if (TREE_CODE (index) == RANGE_EXPR
14458 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14459 end = middle;
14460 else
14461 return (*elts)[middle].value;
14465 return t;
14468 /* Return a VECTOR_CST if possible. */
14469 case CONSTRUCTOR:
14471 tree type = TREE_TYPE (t);
14472 if (TREE_CODE (type) != VECTOR_TYPE)
14473 return t;
14475 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14476 unsigned HOST_WIDE_INT idx, pos = 0;
14477 tree value;
14479 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14481 if (!CONSTANT_CLASS_P (value))
14482 return t;
14483 if (TREE_CODE (value) == VECTOR_CST)
14485 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14486 vec[pos++] = VECTOR_CST_ELT (value, i);
14488 else
14489 vec[pos++] = value;
14491 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14492 vec[pos] = build_zero_cst (TREE_TYPE (type));
14494 return build_vector (type, vec);
14497 case CONST_DECL:
14498 return fold (DECL_INITIAL (t));
14500 default:
14501 return t;
14502 } /* switch (code) */
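/* E.g. folding a PLUS_EXPR whose operands are the INTEGER_CSTs 2 and 3
   takes the TREE_CODE_LENGTH == 2 path above, dispatches to
   fold_binary_loc, and comes back as the INTEGER_CST 5.  */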
14505 #ifdef ENABLE_FOLD_CHECKING
14506 #undef fold
14508 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14509 hash_table <pointer_hash <tree_node> >);
14510 static void fold_check_failed (const_tree, const_tree);
14511 void print_fold_checksum (const_tree);
14513 /* When --enable-checking=fold, compute a digest of EXPR before
14514 and after the actual fold call, to verify that fold did not
14515 accidentally change the original expression. */
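/* The digests deliberately skip fields fold is allowed to touch, such
   as DECL_ASSEMBLER_NAME and the type caches (see fold_checksum_tree
   below), so a mismatch really does mean some folder modified a tree
   it was only supposed to read, e.g. by mutating a shared operand in
   place.  */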
14517 tree
14518 fold (tree expr)
14520 tree ret;
14521 struct md5_ctx ctx;
14522 unsigned char checksum_before[16], checksum_after[16];
14523 hash_table <pointer_hash <tree_node> > ht;
14525 ht.create (32);
14526 md5_init_ctx (&ctx);
14527 fold_checksum_tree (expr, &ctx, ht);
14528 md5_finish_ctx (&ctx, checksum_before);
14529 ht.empty ();
14531 ret = fold_1 (expr);
14533 md5_init_ctx (&ctx);
14534 fold_checksum_tree (expr, &ctx, ht);
14535 md5_finish_ctx (&ctx, checksum_after);
14536 ht.dispose ();
14538 if (memcmp (checksum_before, checksum_after, 16))
14539 fold_check_failed (expr, ret);
14541 return ret;
14544 void
14545 print_fold_checksum (const_tree expr)
14547 struct md5_ctx ctx;
14548 unsigned char checksum[16], cnt;
14549 hash_table <pointer_hash <tree_node> > ht;
14551 ht.create (32);
14552 md5_init_ctx (&ctx);
14553 fold_checksum_tree (expr, &ctx, ht);
14554 md5_finish_ctx (&ctx, checksum);
14555 ht.dispose ();
14556 for (cnt = 0; cnt < 16; ++cnt)
14557 fprintf (stderr, "%02x", checksum[cnt]);
14558 putc ('\n', stderr);
14561 static void
14562 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14564 internal_error ("fold check: original tree changed by fold");
14567 static void
14568 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14569 hash_table <pointer_hash <tree_node> > ht)
14571 tree_node **slot;
14572 enum tree_code code;
14573 union tree_node buf;
14574 int i, len;
14576 recursive_label:
14577 if (expr == NULL)
14578 return;
14579 slot = ht.find_slot (expr, INSERT);
14580 if (*slot != NULL)
14581 return;
14582 *slot = CONST_CAST_TREE (expr);
14583 code = TREE_CODE (expr);
14584 if (TREE_CODE_CLASS (code) == tcc_declaration
14585 && DECL_ASSEMBLER_NAME_SET_P (expr))
14587 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14588 memcpy ((char *) &buf, expr, tree_size (expr));
14589 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14590 expr = (tree) &buf;
14592 else if (TREE_CODE_CLASS (code) == tcc_type
14593 && (TYPE_POINTER_TO (expr)
14594 || TYPE_REFERENCE_TO (expr)
14595 || TYPE_CACHED_VALUES_P (expr)
14596 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14597 || TYPE_NEXT_VARIANT (expr)))
14599 /* Allow these fields to be modified. */
14600 tree tmp;
14601 memcpy ((char *) &buf, expr, tree_size (expr));
14602 expr = tmp = (tree) &buf;
14603 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14604 TYPE_POINTER_TO (tmp) = NULL;
14605 TYPE_REFERENCE_TO (tmp) = NULL;
14606 TYPE_NEXT_VARIANT (tmp) = NULL;
14607 if (TYPE_CACHED_VALUES_P (tmp))
14609 TYPE_CACHED_VALUES_P (tmp) = 0;
14610 TYPE_CACHED_VALUES (tmp) = NULL;
14613 md5_process_bytes (expr, tree_size (expr), ctx);
14614 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14615 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14616 if (TREE_CODE_CLASS (code) != tcc_type
14617 && TREE_CODE_CLASS (code) != tcc_declaration
14618 && code != TREE_LIST
14619 && code != SSA_NAME
14620 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14621 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14622 switch (TREE_CODE_CLASS (code))
14624 case tcc_constant:
14625 switch (code)
14627 case STRING_CST:
14628 md5_process_bytes (TREE_STRING_POINTER (expr),
14629 TREE_STRING_LENGTH (expr), ctx);
14630 break;
14631 case COMPLEX_CST:
14632 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14633 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14634 break;
14635 case VECTOR_CST:
14636 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14637 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14638 break;
14639 default:
14640 break;
14642 break;
14643 case tcc_exceptional:
14644 switch (code)
14646 case TREE_LIST:
14647 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14648 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14649 expr = TREE_CHAIN (expr);
14650 goto recursive_label;
14651 break;
14652 case TREE_VEC:
14653 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14654 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14655 break;
14656 default:
14657 break;
14659 break;
14660 case tcc_expression:
14661 case tcc_reference:
14662 case tcc_comparison:
14663 case tcc_unary:
14664 case tcc_binary:
14665 case tcc_statement:
14666 case tcc_vl_exp:
14667 len = TREE_OPERAND_LENGTH (expr);
14668 for (i = 0; i < len; ++i)
14669 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14670 break;
14671 case tcc_declaration:
14672 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14673 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14674 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14676 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14677 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14678 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14679 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14680 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14682 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14683 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14685 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14687 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14688 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14689 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14691 break;
14692 case tcc_type:
14693 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14694 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14695 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14696 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14697 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14698 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14699 if (INTEGRAL_TYPE_P (expr)
14700 || SCALAR_FLOAT_TYPE_P (expr))
14702 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14703 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14705 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14706 if (TREE_CODE (expr) == RECORD_TYPE
14707 || TREE_CODE (expr) == UNION_TYPE
14708 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14709 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14710 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14711 break;
14712 default:
14713 break;
14717 /* Helper function for outputting the checksum of a tree T. When
14718 debugging with gdb, you can "define mynext" to be "next" followed
14719 by "call debug_fold_checksum (op0)", then just trace down till the
14720 outputs differ. */
14722 DEBUG_FUNCTION void
14723 debug_fold_checksum (const_tree t)
14725 int i;
14726 unsigned char checksum[16];
14727 struct md5_ctx ctx;
14728 hash_table <pointer_hash <tree_node> > ht;
14729 ht.create (32);
14731 md5_init_ctx (&ctx);
14732 fold_checksum_tree (t, &ctx, ht);
14733 md5_finish_ctx (&ctx, checksum);
14734 ht.empty ();
14736 for (i = 0; i < 16; i++)
14737 fprintf (stderr, "%d ", checksum[i]);
14739 fprintf (stderr, "\n");
14742 #endif
14744 /* Fold a unary tree expression with code CODE of type TYPE with an
14745 operand OP0. LOC is the location of the resulting expression.
14746 Return a folded expression if successful. Otherwise, return a tree
14747 expression with code CODE of type TYPE with an operand OP0. */
14749 tree
14750 fold_build1_stat_loc (location_t loc,
14751 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14753 tree tem;
14754 #ifdef ENABLE_FOLD_CHECKING
14755 unsigned char checksum_before[16], checksum_after[16];
14756 struct md5_ctx ctx;
14757 hash_table <pointer_hash <tree_node> > ht;
14759 ht.create (32);
14760 md5_init_ctx (&ctx);
14761 fold_checksum_tree (op0, &ctx, ht);
14762 md5_finish_ctx (&ctx, checksum_before);
14763 ht.empty ();
14764 #endif
14766 tem = fold_unary_loc (loc, code, type, op0);
14767 if (!tem)
14768 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14770 #ifdef ENABLE_FOLD_CHECKING
14771 md5_init_ctx (&ctx);
14772 fold_checksum_tree (op0, &ctx, ht);
14773 md5_finish_ctx (&ctx, checksum_after);
14774 ht.dispose ();
14776 if (memcmp (checksum_before, checksum_after, 16))
14777 fold_check_failed (op0, tem);
14778 #endif
14779 return tem;
14782 /* Fold a binary tree expression with code CODE of type TYPE with
14783 operands OP0 and OP1. LOC is the location of the resulting
14784 expression. Return a folded expression if successful. Otherwise,
14785 return a tree expression with code CODE of type TYPE with operands
14786 OP0 and OP1. */
14788 tree
14789 fold_build2_stat_loc (location_t loc,
14790 enum tree_code code, tree type, tree op0, tree op1
14791 MEM_STAT_DECL)
14793 tree tem;
14794 #ifdef ENABLE_FOLD_CHECKING
14795 unsigned char checksum_before_op0[16],
14796 checksum_before_op1[16],
14797 checksum_after_op0[16],
14798 checksum_after_op1[16];
14799 struct md5_ctx ctx;
14800 hash_table <pointer_hash <tree_node> > ht;
14802 ht.create (32);
14803 md5_init_ctx (&ctx);
14804 fold_checksum_tree (op0, &ctx, ht);
14805 md5_finish_ctx (&ctx, checksum_before_op0);
14806 ht.empty ();
14808 md5_init_ctx (&ctx);
14809 fold_checksum_tree (op1, &ctx, ht);
14810 md5_finish_ctx (&ctx, checksum_before_op1);
14811 ht.empty ();
14812 #endif
14814 tem = fold_binary_loc (loc, code, type, op0, op1);
14815 if (!tem)
14816 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14818 #ifdef ENABLE_FOLD_CHECKING
14819 md5_init_ctx (&ctx);
14820 fold_checksum_tree (op0, &ctx, ht);
14821 md5_finish_ctx (&ctx, checksum_after_op0);
14822 ht.empty ();
14824 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14825 fold_check_failed (op0, tem);
14827 md5_init_ctx (&ctx);
14828 fold_checksum_tree (op1, &ctx, ht);
14829 md5_finish_ctx (&ctx, checksum_after_op1);
14830 ht.dispose ();
14832 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14833 fold_check_failed (op1, tem);
14834 #endif
14835 return tem;
14838 /* Fold a ternary tree expression with code CODE of type TYPE with
14839 operands OP0, OP1, and OP2. Return a folded expression if
14840 successful. Otherwise, return a tree expression with code CODE of
14841 type TYPE with operands OP0, OP1, and OP2. */
14843 tree
14844 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14845 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14847 tree tem;
14848 #ifdef ENABLE_FOLD_CHECKING
14849 unsigned char checksum_before_op0[16],
14850 checksum_before_op1[16],
14851 checksum_before_op2[16],
14852 checksum_after_op0[16],
14853 checksum_after_op1[16],
14854 checksum_after_op2[16];
14855 struct md5_ctx ctx;
14856 hash_table <pointer_hash <tree_node> > ht;
14858 ht.create (32);
14859 md5_init_ctx (&ctx);
14860 fold_checksum_tree (op0, &ctx, ht);
14861 md5_finish_ctx (&ctx, checksum_before_op0);
14862 ht.empty ();
14864 md5_init_ctx (&ctx);
14865 fold_checksum_tree (op1, &ctx, ht);
14866 md5_finish_ctx (&ctx, checksum_before_op1);
14867 ht.empty ();
14869 md5_init_ctx (&ctx);
14870 fold_checksum_tree (op2, &ctx, ht);
14871 md5_finish_ctx (&ctx, checksum_before_op2);
14872 ht.empty ();
14873 #endif
14875 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14876 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14877 if (!tem)
14878 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14880 #ifdef ENABLE_FOLD_CHECKING
14881 md5_init_ctx (&ctx);
14882 fold_checksum_tree (op0, &ctx, ht);
14883 md5_finish_ctx (&ctx, checksum_after_op0);
14884 ht.empty ();
14886 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14887 fold_check_failed (op0, tem);
14889 md5_init_ctx (&ctx);
14890 fold_checksum_tree (op1, &ctx, ht);
14891 md5_finish_ctx (&ctx, checksum_after_op1);
14892 ht.empty ();
14894 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14895 fold_check_failed (op1, tem);
14897 md5_init_ctx (&ctx);
14898 fold_checksum_tree (op2, &ctx, ht);
14899 md5_finish_ctx (&ctx, checksum_after_op2);
14900 ht.dispose ();
14902 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14903 fold_check_failed (op2, tem);
14904 #endif
14905 return tem;
14908 /* Fold a CALL_EXPR expression of type TYPE with function FN, the
14909 NARGS arguments in ARGARRAY, and a null static chain.
14910 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14911 of type TYPE from the given operands as constructed by build_call_array. */
14913 tree
14914 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14915 int nargs, tree *argarray)
14917 tree tem;
14918 #ifdef ENABLE_FOLD_CHECKING
14919 unsigned char checksum_before_fn[16],
14920 checksum_before_arglist[16],
14921 checksum_after_fn[16],
14922 checksum_after_arglist[16];
14923 struct md5_ctx ctx;
14924 hash_table <pointer_hash <tree_node> > ht;
14925 int i;
14927 ht.create (32);
14928 md5_init_ctx (&ctx);
14929 fold_checksum_tree (fn, &ctx, ht);
14930 md5_finish_ctx (&ctx, checksum_before_fn);
14931 ht.empty ();
14933 md5_init_ctx (&ctx);
14934 for (i = 0; i < nargs; i++)
14935 fold_checksum_tree (argarray[i], &ctx, ht);
14936 md5_finish_ctx (&ctx, checksum_before_arglist);
14937 ht.empty ();
14938 #endif
14940 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14942 #ifdef ENABLE_FOLD_CHECKING
14943 md5_init_ctx (&ctx);
14944 fold_checksum_tree (fn, &ctx, ht);
14945 md5_finish_ctx (&ctx, checksum_after_fn);
14946 ht.empty ();
14948 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14949 fold_check_failed (fn, tem);
14951 md5_init_ctx (&ctx);
14952 for (i = 0; i < nargs; i++)
14953 fold_checksum_tree (argarray[i], &ctx, ht);
14954 md5_finish_ctx (&ctx, checksum_after_arglist);
14955 ht.dispose ();
14957 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14958 fold_check_failed (NULL_TREE, tem);
14959 #endif
14960 return tem;
14963 /* Perform constant folding and related simplification of initializer
14964 expression EXPR. The functions below behave like "fold_buildN" but ignore
14965 potential run-time traps and exceptions that fold must preserve. */
14967 #define START_FOLD_INIT \
14968 int saved_signaling_nans = flag_signaling_nans;\
14969 int saved_trapping_math = flag_trapping_math;\
14970 int saved_rounding_math = flag_rounding_math;\
14971 int saved_trapv = flag_trapv;\
14972 int saved_folding_initializer = folding_initializer;\
14973 flag_signaling_nans = 0;\
14974 flag_trapping_math = 0;\
14975 flag_rounding_math = 0;\
14976 flag_trapv = 0;\
14977 folding_initializer = 1;
14979 #define END_FOLD_INIT \
14980 flag_signaling_nans = saved_signaling_nans;\
14981 flag_trapping_math = saved_trapping_math;\
14982 flag_rounding_math = saved_rounding_math;\
14983 flag_trapv = saved_trapv;\
14984 folding_initializer = saved_folding_initializer;
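/* E.g. when folding a static initializer, "1.0 / 3.0" may be folded
   even under -frounding-math, and an overflowing signed addition even
   under -ftrapv, because the flags cleared above say the run-time
   rounding and trapping behavior need not be preserved.  */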
14986 tree
14987 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14988 tree type, tree op)
14990 tree result;
14991 START_FOLD_INIT;
14993 result = fold_build1_loc (loc, code, type, op);
14995 END_FOLD_INIT;
14996 return result;
14999 tree
15000 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15001 tree type, tree op0, tree op1)
15003 tree result;
15004 START_FOLD_INIT;
15006 result = fold_build2_loc (loc, code, type, op0, op1);
15008 END_FOLD_INIT;
15009 return result;
15012 tree
15013 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15014 tree type, tree op0, tree op1, tree op2)
15016 tree result;
15017 START_FOLD_INIT;
15019 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15021 END_FOLD_INIT;
15022 return result;
15025 tree
15026 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15027 int nargs, tree *argarray)
15029 tree result;
15030 START_FOLD_INIT;
15032 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15034 END_FOLD_INIT;
15035 return result;
15038 #undef START_FOLD_INIT
15039 #undef END_FOLD_INIT
15041 /* Determine if the first argument is a multiple of the second. Return 0 if
15042 it is not, or if we cannot easily determine it to be.
15044 An example of the sort of thing we care about (at this point; this routine
15045 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15046 fold cases do now) is discovering that
15048 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15050 is a multiple of
15052 SAVE_EXPR (J * 8)
15054 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15056 This code also handles discovering that
15058 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15060 is a multiple of 8 so we don't have to worry about dealing with a
15061 possible remainder.
15063 Note that we *look* inside a SAVE_EXPR only to determine how it was
15064 calculated; it is not safe for fold to do much of anything else with the
15065 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15066 at run time. For example, the latter example above *cannot* be implemented
15067 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15068 evaluation time of the original SAVE_EXPR is not necessarily the same at
15069 the time the new expression is evaluated. The only optimization of this
15070 sort that would be valid is changing
15072 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15074 divided by 8 to
15076 SAVE_EXPR (I) * SAVE_EXPR (J)
15078 (where the same SAVE_EXPR (J) is used in the original and the
15079 transformed version). */
15081 int
15082 multiple_of_p (tree type, const_tree top, const_tree bottom)
15084 if (operand_equal_p (top, bottom, 0))
15085 return 1;
15087 if (TREE_CODE (type) != INTEGER_TYPE)
15088 return 0;
15090 switch (TREE_CODE (top))
15092 case BIT_AND_EXPR:
15093 /* Bitwise AND can only clear bits: if BOTTOM is a power of two and
15094 either operand is a multiple of BOTTOM, then TOP is as well. */
15095 if (!integer_pow2p (bottom))
15096 return 0;
15097 /* FALLTHRU */
15099 case MULT_EXPR:
15100 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15101 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15103 case PLUS_EXPR:
15104 case MINUS_EXPR:
15105 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15106 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15108 case LSHIFT_EXPR:
15109 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15111 tree op1, t1;
15113 op1 = TREE_OPERAND (top, 1);
15114 /* const_binop may not detect overflow correctly,
15115 so check for it explicitly here. */
15116 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15117 > TREE_INT_CST_LOW (op1)
15118 && TREE_INT_CST_HIGH (op1) == 0
15119 && 0 != (t1 = fold_convert (type,
15120 const_binop (LSHIFT_EXPR,
15121 size_one_node,
15122 op1)))
15123 && !TREE_OVERFLOW (t1))
15124 return multiple_of_p (type, t1, bottom);
15126 return 0;
15128 case NOP_EXPR:
15129 /* Can't handle conversions from a non-integral or wider integral type. */
15130 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15131 || (TYPE_PRECISION (type)
15132 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15133 return 0;
15135 /* ... fall through ... */
15137 case SAVE_EXPR:
15138 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15140 case COND_EXPR:
15141 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15142 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15144 case INTEGER_CST:
15145 if (TREE_CODE (bottom) != INTEGER_CST
15146 || integer_zerop (bottom)
15147 || (TYPE_UNSIGNED (type)
15148 && (tree_int_cst_sgn (top) < 0
15149 || tree_int_cst_sgn (bottom) < 0)))
15150 return 0;
15151 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15152 top, bottom));
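/* E.g. for the INTEGER_CSTs 24 and 8 this reduces to checking that
   24 % 8 == 0 via int_const_binop, so the result is 1.  */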
15154 default:
15155 return 0;
15159 /* Return true if CODE or TYPE is known to be non-negative. */
15161 static bool
15162 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15164 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15165 && truth_value_p (code))
15166 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15167 have a signed:1 type (where the values are -1 and 0). */
15168 return true;
15169 return false;
15172 /* Return true if (CODE OP0) is known to be non-negative. If the return
15173 value is based on the assumption that signed overflow is undefined,
15174 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15175 *STRICT_OVERFLOW_P. */
15177 bool
15178 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15179 bool *strict_overflow_p)
15181 if (TYPE_UNSIGNED (type))
15182 return true;
15184 switch (code)
15186 case ABS_EXPR:
15187 /* We can't return 1 if flag_wrapv is set because
15188 ABS_EXPR<INT_MIN> = INT_MIN. */
15189 if (!INTEGRAL_TYPE_P (type))
15190 return true;
15191 if (TYPE_OVERFLOW_UNDEFINED (type))
15193 *strict_overflow_p = true;
15194 return true;
15196 break;
15198 case NON_LVALUE_EXPR:
15199 case FLOAT_EXPR:
15200 case FIX_TRUNC_EXPR:
15201 return tree_expr_nonnegative_warnv_p (op0,
15202 strict_overflow_p);
15204 case NOP_EXPR:
15206 tree inner_type = TREE_TYPE (op0);
15207 tree outer_type = type;
15209 if (TREE_CODE (outer_type) == REAL_TYPE)
15211 if (TREE_CODE (inner_type) == REAL_TYPE)
15212 return tree_expr_nonnegative_warnv_p (op0,
15213 strict_overflow_p);
15214 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15216 if (TYPE_UNSIGNED (inner_type))
15217 return true;
15218 return tree_expr_nonnegative_warnv_p (op0,
15219 strict_overflow_p);
15222 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15224 if (TREE_CODE (inner_type) == REAL_TYPE)
15225 return tree_expr_nonnegative_warnv_p (op0,
15226 strict_overflow_p);
15227 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15228 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15229 && TYPE_UNSIGNED (inner_type);
15232 break;
15234 default:
15235 return tree_simple_nonnegative_warnv_p (code, type);
15238 /* We don't know the sign of `t', so be conservative and return false. */
15239 return false;
15242 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15243 value is based on the assumption that signed overflow is undefined,
15244 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15245 *STRICT_OVERFLOW_P. */
15247 bool
15248 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15249 tree op1, bool *strict_overflow_p)
15251 if (TYPE_UNSIGNED (type))
15252 return true;
15254 switch (code)
15256 case POINTER_PLUS_EXPR:
15257 case PLUS_EXPR:
15258 if (FLOAT_TYPE_P (type))
15259 return (tree_expr_nonnegative_warnv_p (op0,
15260 strict_overflow_p)
15261 && tree_expr_nonnegative_warnv_p (op1,
15262 strict_overflow_p));
15264 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15265 both unsigned and at least 2 bits shorter than the result. */
15266 if (TREE_CODE (type) == INTEGER_TYPE
15267 && TREE_CODE (op0) == NOP_EXPR
15268 && TREE_CODE (op1) == NOP_EXPR)
15270 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15271 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15272 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15273 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15275 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15276 TYPE_PRECISION (inner2)) + 1;
15277 return prec < TYPE_PRECISION (type);
15280 break;
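/* A worked instance: for unsigned char X and Y, (int) X + (int) Y is
   at most 255 + 255 == 510, so PREC == 8 + 1 == 9 < 32 and the sum is
   known non-negative with no overflow assumption.  */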
15282 case MULT_EXPR:
15283 if (FLOAT_TYPE_P (type))
15285 /* x * x for floating point x is always non-negative. */
15286 if (operand_equal_p (op0, op1, 0))
15287 return true;
15288 return (tree_expr_nonnegative_warnv_p (op0,
15289 strict_overflow_p)
15290 && tree_expr_nonnegative_warnv_p (op1,
15291 strict_overflow_p));
15294 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15295 both unsigned and the sum of their precisions is less than that of the result. */
15296 if (TREE_CODE (type) == INTEGER_TYPE
15297 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15298 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15300 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15301 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15302 : TREE_TYPE (op0);
15303 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15304 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15305 : TREE_TYPE (op1);
15307 bool unsigned0 = TYPE_UNSIGNED (inner0);
15308 bool unsigned1 = TYPE_UNSIGNED (inner1);
15310 if (TREE_CODE (op0) == INTEGER_CST)
15311 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15313 if (TREE_CODE (op1) == INTEGER_CST)
15314 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15316 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15317 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15319 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15320 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15321 : TYPE_PRECISION (inner0);
15323 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15324 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15325 : TYPE_PRECISION (inner1);
15327 return precision0 + precision1 < TYPE_PRECISION (type);
15330 return false;
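/* A worked instance: for unsigned char X and Y, (int) X * (int) Y is
   at most 255 * 255 == 65025, which fits in 8 + 8 == 16 bits, so the
   product cannot reach the sign bit of the 32-bit result.  */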
15332 case BIT_AND_EXPR:
15333 case MAX_EXPR:
15334 return (tree_expr_nonnegative_warnv_p (op0,
15335 strict_overflow_p)
15336 || tree_expr_nonnegative_warnv_p (op1,
15337 strict_overflow_p));
15339 case BIT_IOR_EXPR:
15340 case BIT_XOR_EXPR:
15341 case MIN_EXPR:
15342 case RDIV_EXPR:
15343 case TRUNC_DIV_EXPR:
15344 case CEIL_DIV_EXPR:
15345 case FLOOR_DIV_EXPR:
15346 case ROUND_DIV_EXPR:
15347 return (tree_expr_nonnegative_warnv_p (op0,
15348 strict_overflow_p)
15349 && tree_expr_nonnegative_warnv_p (op1,
15350 strict_overflow_p));
15352 case TRUNC_MOD_EXPR:
15353 case CEIL_MOD_EXPR:
15354 case FLOOR_MOD_EXPR:
15355 case ROUND_MOD_EXPR:
15356 return tree_expr_nonnegative_warnv_p (op0,
15357 strict_overflow_p);
15358 default:
15359 return tree_simple_nonnegative_warnv_p (code, type);
15362 /* We don't know the sign of `t', so be conservative and return false. */
15363 return false;
15366 /* Return true if T is known to be non-negative. If the return
15367 value is based on the assumption that signed overflow is undefined,
15368 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15369 *STRICT_OVERFLOW_P. */
15371 bool
15372 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15374 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15375 return true;
15377 switch (TREE_CODE (t))
15379 case INTEGER_CST:
15380 return tree_int_cst_sgn (t) >= 0;
15382 case REAL_CST:
15383 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15385 case FIXED_CST:
15386 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15388 case COND_EXPR:
15389 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15390 strict_overflow_p)
15391 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15392 strict_overflow_p));
15393 default:
15394 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15395 TREE_TYPE (t));
15397 /* We don't know the sign of `t', so be conservative and return false. */
15398 return false;
15401 /* Return true if T is known to be non-negative. If the return
15402 value is based on the assumption that signed overflow is undefined,
15403 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15404 *STRICT_OVERFLOW_P. */
15406 bool
15407 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15408 tree arg0, tree arg1, bool *strict_overflow_p)
15410 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15411 switch (DECL_FUNCTION_CODE (fndecl))
15413 CASE_FLT_FN (BUILT_IN_ACOS):
15414 CASE_FLT_FN (BUILT_IN_ACOSH):
15415 CASE_FLT_FN (BUILT_IN_CABS):
15416 CASE_FLT_FN (BUILT_IN_COSH):
15417 CASE_FLT_FN (BUILT_IN_ERFC):
15418 CASE_FLT_FN (BUILT_IN_EXP):
15419 CASE_FLT_FN (BUILT_IN_EXP10):
15420 CASE_FLT_FN (BUILT_IN_EXP2):
15421 CASE_FLT_FN (BUILT_IN_FABS):
15422 CASE_FLT_FN (BUILT_IN_FDIM):
15423 CASE_FLT_FN (BUILT_IN_HYPOT):
15424 CASE_FLT_FN (BUILT_IN_POW10):
15425 CASE_INT_FN (BUILT_IN_FFS):
15426 CASE_INT_FN (BUILT_IN_PARITY):
15427 CASE_INT_FN (BUILT_IN_POPCOUNT):
15428 case BUILT_IN_BSWAP32:
15429 case BUILT_IN_BSWAP64:
15430 /* Always true. */
15431 return true;
15433 CASE_FLT_FN (BUILT_IN_SQRT):
15434 /* sqrt(-0.0) is -0.0. */
15435 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15436 return true;
15437 return tree_expr_nonnegative_warnv_p (arg0,
15438 strict_overflow_p);
15440 CASE_FLT_FN (BUILT_IN_ASINH):
15441 CASE_FLT_FN (BUILT_IN_ATAN):
15442 CASE_FLT_FN (BUILT_IN_ATANH):
15443 CASE_FLT_FN (BUILT_IN_CBRT):
15444 CASE_FLT_FN (BUILT_IN_CEIL):
15445 CASE_FLT_FN (BUILT_IN_ERF):
15446 CASE_FLT_FN (BUILT_IN_EXPM1):
15447 CASE_FLT_FN (BUILT_IN_FLOOR):
15448 CASE_FLT_FN (BUILT_IN_FMOD):
15449 CASE_FLT_FN (BUILT_IN_FREXP):
15450 CASE_FLT_FN (BUILT_IN_ICEIL):
15451 CASE_FLT_FN (BUILT_IN_IFLOOR):
15452 CASE_FLT_FN (BUILT_IN_IRINT):
15453 CASE_FLT_FN (BUILT_IN_IROUND):
15454 CASE_FLT_FN (BUILT_IN_LCEIL):
15455 CASE_FLT_FN (BUILT_IN_LDEXP):
15456 CASE_FLT_FN (BUILT_IN_LFLOOR):
15457 CASE_FLT_FN (BUILT_IN_LLCEIL):
15458 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15459 CASE_FLT_FN (BUILT_IN_LLRINT):
15460 CASE_FLT_FN (BUILT_IN_LLROUND):
15461 CASE_FLT_FN (BUILT_IN_LRINT):
15462 CASE_FLT_FN (BUILT_IN_LROUND):
15463 CASE_FLT_FN (BUILT_IN_MODF):
15464 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15465 CASE_FLT_FN (BUILT_IN_RINT):
15466 CASE_FLT_FN (BUILT_IN_ROUND):
15467 CASE_FLT_FN (BUILT_IN_SCALB):
15468 CASE_FLT_FN (BUILT_IN_SCALBLN):
15469 CASE_FLT_FN (BUILT_IN_SCALBN):
15470 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15471 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15472 CASE_FLT_FN (BUILT_IN_SINH):
15473 CASE_FLT_FN (BUILT_IN_TANH):
15474 CASE_FLT_FN (BUILT_IN_TRUNC):
15475 /* True if the 1st argument is nonnegative. */
15476 return tree_expr_nonnegative_warnv_p (arg0,
15477 strict_overflow_p);
15479 CASE_FLT_FN (BUILT_IN_FMAX):
15480 /* True if the 1st OR 2nd arguments are nonnegative. */
15481 return (tree_expr_nonnegative_warnv_p (arg0,
15482 strict_overflow_p)
15483 || (tree_expr_nonnegative_warnv_p (arg1,
15484 strict_overflow_p)));
15486 CASE_FLT_FN (BUILT_IN_FMIN):
15487 /* True if the 1st AND 2nd arguments are nonnegative. */
15488 return (tree_expr_nonnegative_warnv_p (arg0,
15489 strict_overflow_p)
15490 && (tree_expr_nonnegative_warnv_p (arg1,
15491 strict_overflow_p)));
15493 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15494 /* True if the 2nd argument is nonnegative. */
15495 return tree_expr_nonnegative_warnv_p (arg1,
15496 strict_overflow_p);
15498 CASE_FLT_FN (BUILT_IN_POWI):
15499 /* True if the 1st argument is nonnegative or the second
15500 argument is an even integer. */
15501 if (TREE_CODE (arg1) == INTEGER_CST
15502 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15503 return true;
15504 return tree_expr_nonnegative_warnv_p (arg0,
15505 strict_overflow_p);
15507 CASE_FLT_FN (BUILT_IN_POW):
15508 /* True if the 1st argument is nonnegative or the second
15509 argument is an even integer valued real. */
15510 if (TREE_CODE (arg1) == REAL_CST)
15512 REAL_VALUE_TYPE c;
15513 HOST_WIDE_INT n;
15515 c = TREE_REAL_CST (arg1);
15516 n = real_to_integer (&c);
15517 if ((n & 1) == 0)
15519 REAL_VALUE_TYPE cint;
15520 real_from_integer (&cint, VOIDmode, n,
15521 n < 0 ? -1 : 0, 0);
15522 if (real_identical (&c, &cint))
15523 return true;
15526 return tree_expr_nonnegative_warnv_p (arg0,
15527 strict_overflow_p);
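/* E.g. pow (x, 2.0) is treated as non-negative for every x because 2.0
   is an even integer valued real, while pow (x, 2.5) is non-negative
   only when x itself is known to be.  */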
15529 default:
15530 break;
15532 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15533 type);
15536 /* Return true if T is known to be non-negative. If the return
15537 value is based on the assumption that signed overflow is undefined,
15538 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15539 *STRICT_OVERFLOW_P. */
15541 bool
15542 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15544 enum tree_code code = TREE_CODE (t);
15545 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15546 return true;
15548 switch (code)
15550 case TARGET_EXPR:
15552 tree temp = TARGET_EXPR_SLOT (t);
15553 t = TARGET_EXPR_INITIAL (t);
15555 /* If the initializer is non-void, then it's a normal expression
15556 that will be assigned to the slot. */
15557 if (!VOID_TYPE_P (t))
15558 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15560 /* Otherwise, the initializer sets the slot in some way. One common
15561 way is an assignment statement at the end of the initializer. */
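/* E.g. a TARGET_EXPR whose initializer ends in "slot = ABS_EXPR <x>"
   is non-negative exactly when that final assigned value is; the loop
   below digs that assignment out of BIND_EXPRs and statement lists.  */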
15562 while (1)
15564 if (TREE_CODE (t) == BIND_EXPR)
15565 t = expr_last (BIND_EXPR_BODY (t));
15566 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15567 || TREE_CODE (t) == TRY_CATCH_EXPR)
15568 t = expr_last (TREE_OPERAND (t, 0));
15569 else if (TREE_CODE (t) == STATEMENT_LIST)
15570 t = expr_last (t);
15571 else
15572 break;
15574 if (TREE_CODE (t) == MODIFY_EXPR
15575 && TREE_OPERAND (t, 0) == temp)
15576 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15577 strict_overflow_p);
15579 return false;
15582 case CALL_EXPR:
15584 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15585 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15587 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15588 get_callee_fndecl (t),
15589 arg0,
15590 arg1,
15591 strict_overflow_p);
15593 case COMPOUND_EXPR:
15594 case MODIFY_EXPR:
15595 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15596 strict_overflow_p);
15597 case BIND_EXPR:
15598 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15599 strict_overflow_p);
15600 case SAVE_EXPR:
15601 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15602 strict_overflow_p);
15604 default:
15605 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15606 TREE_TYPE (t));
15609 /* We don't know the sign of `t', so be conservative and return false. */
15610 return false;
15613 /* Return true if T is known to be non-negative. If the return
15614 value is based on the assumption that signed overflow is undefined,
15615 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15616 *STRICT_OVERFLOW_P. */
15618 bool
15619 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15621 enum tree_code code;
15622 if (t == error_mark_node)
15623 return false;
15625 code = TREE_CODE (t);
15626 switch (TREE_CODE_CLASS (code))
15628 case tcc_binary:
15629 case tcc_comparison:
15630 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15631 TREE_TYPE (t),
15632 TREE_OPERAND (t, 0),
15633 TREE_OPERAND (t, 1),
15634 strict_overflow_p);
15636 case tcc_unary:
15637 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15638 TREE_TYPE (t),
15639 TREE_OPERAND (t, 0),
15640 strict_overflow_p);
15642 case tcc_constant:
15643 case tcc_declaration:
15644 case tcc_reference:
15645 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15647 default:
15648 break;
15651 switch (code)
15653 case TRUTH_AND_EXPR:
15654 case TRUTH_OR_EXPR:
15655 case TRUTH_XOR_EXPR:
15656 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15657 TREE_TYPE (t),
15658 TREE_OPERAND (t, 0),
15659 TREE_OPERAND (t, 1),
15660 strict_overflow_p);
15661 case TRUTH_NOT_EXPR:
15662 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15663 TREE_TYPE (t),
15664 TREE_OPERAND (t, 0),
15665 strict_overflow_p);
15667 case COND_EXPR:
15668 case CONSTRUCTOR:
15669 case OBJ_TYPE_REF:
15670 case ASSERT_EXPR:
15671 case ADDR_EXPR:
15672 case WITH_SIZE_EXPR:
15673 case SSA_NAME:
15674 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15676 default:
15677 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15681 /* Return true if `t' is known to be non-negative. Handle warnings
15682 about undefined signed overflow. */
15684 bool
15685 tree_expr_nonnegative_p (tree t)
15687 bool ret, strict_overflow_p;
15689 strict_overflow_p = false;
15690 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15691 if (strict_overflow_p)
15692 fold_overflow_warning (("assuming signed overflow does not occur when "
15693 "determining that expression is always "
15694 "non-negative"),
15695 WARN_STRICT_OVERFLOW_MISC);
15696 return ret;
15700 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15701 For floating point we further ensure that T is not denormal.
15702 Similar logic is present in nonzero_address_p in rtlanal.c.
15704 If the return value is based on the assumption that signed overflow
15705 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15706 change *STRICT_OVERFLOW_P. */
15708 bool
15709 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15710 bool *strict_overflow_p)
15712 switch (code)
15714 case ABS_EXPR:
15715 return tree_expr_nonzero_warnv_p (op0,
15716 strict_overflow_p);
15718 case NOP_EXPR:
15720 tree inner_type = TREE_TYPE (op0);
15721 tree outer_type = type;
15723 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15724 && tree_expr_nonzero_warnv_p (op0,
15725 strict_overflow_p));
15727 break;
15729 case NON_LVALUE_EXPR:
15730 return tree_expr_nonzero_warnv_p (op0,
15731 strict_overflow_p);
15733 default:
15734 break;
15737 return false;
15740 /* Return true when the binary expression (CODE OP0 OP1) is known to be
15741 nonzero (used by tree_expr_nonzero_warnv_p below for integral and pointer types).
15742 Similar logic is present in nonzero_address in rtlanal.h.
15744 If the return value is based on the assumption that signed overflow
15745 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15746 change *STRICT_OVERFLOW_P. */
15748 bool
15749 tree_binary_nonzero_warnv_p (enum tree_code code,
15750 tree type,
15751 tree op0,
15752 tree op1, bool *strict_overflow_p)
15754 bool sub_strict_overflow_p;
15755 switch (code)
15757 case POINTER_PLUS_EXPR:
15758 case PLUS_EXPR:
15759 if (TYPE_OVERFLOW_UNDEFINED (type))
15761 /* In the presence of negative values it is hard
15762 to say anything definite, so require both operands to be non-negative. */
15763 sub_strict_overflow_p = false;
15764 if (!tree_expr_nonnegative_warnv_p (op0,
15765 &sub_strict_overflow_p)
15766 || !tree_expr_nonnegative_warnv_p (op1,
15767 &sub_strict_overflow_p))
15768 return false;
15769 /* One of the operands must be positive and the other non-negative. */
15770 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15771 overflows, on a twos-complement machine the sum of two
15772 nonnegative numbers can never be zero. */
15773 return (tree_expr_nonzero_warnv_p (op0,
15774 strict_overflow_p)
15775 || tree_expr_nonzero_warnv_p (op1,
15776 strict_overflow_p));
15778 break;
15780 case MULT_EXPR:
15781 if (TYPE_OVERFLOW_UNDEFINED (type))
15783 if (tree_expr_nonzero_warnv_p (op0,
15784 strict_overflow_p)
15785 && tree_expr_nonzero_warnv_p (op1,
15786 strict_overflow_p))
15788 *strict_overflow_p = true;
15789 return true;
15792 break;
15794 case MIN_EXPR:
15795 sub_strict_overflow_p = false;
15796 if (tree_expr_nonzero_warnv_p (op0,
15797 &sub_strict_overflow_p)
15798 && tree_expr_nonzero_warnv_p (op1,
15799 &sub_strict_overflow_p))
15801 if (sub_strict_overflow_p)
15802 *strict_overflow_p = true;
15804 break;
15806 case MAX_EXPR:
15807 sub_strict_overflow_p = false;
15808 if (tree_expr_nonzero_warnv_p (op0,
15809 &sub_strict_overflow_p))
15811 if (sub_strict_overflow_p)
15812 *strict_overflow_p = true;
15814 /* When both operands are nonzero, then MAX must be too. */
15815 if (tree_expr_nonzero_warnv_p (op1,
15816 strict_overflow_p))
15817 return true;
15819 /* MAX where operand 0 is positive is positive. */
15820 return tree_expr_nonnegative_warnv_p (op0,
15821 strict_overflow_p);
15823 /* MAX where operand 1 is positive is positive. */
15824 else if (tree_expr_nonzero_warnv_p (op1,
15825 &sub_strict_overflow_p)
15826 && tree_expr_nonnegative_warnv_p (op1,
15827 &sub_strict_overflow_p))
15829 if (sub_strict_overflow_p)
15830 *strict_overflow_p = true;
15831 return true;
15833 break;
15835 case BIT_IOR_EXPR:
15836 return (tree_expr_nonzero_warnv_p (op1,
15837 strict_overflow_p)
15838 || tree_expr_nonzero_warnv_p (op0,
15839 strict_overflow_p));
15841 default:
15842 break;
15845 return false;
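/* Illustrative sketch only (not part of fold-const.c): the PLUS_EXPR case
   above relies on the fact that two non-negative N-bit values sum to at most
   2^N - 2, so even when the signed addition wraps into the sign bit the
   two's-complement result is zero only if both operands are zero.  A
   brute-force check over all 8-bit operand pairs confirms this.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (int a = 0; a <= INT8_MAX; a++)
    for (int b = 0; b <= INT8_MAX; b++)
      {
        /* Wrapping 8-bit sum (two's-complement, as on all GCC targets).  */
        int8_t sum = (int8_t) (a + b);
        if (a != 0 || b != 0)
          assert (sum != 0);
      }
  return 0;
}
#endif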
15848 /* Return true when the single expression T is known to be nonzero,
15849 e.g. a nonzero INTEGER_CST or the address of a non-weak declaration.
15850 Similar logic is present in nonzero_address in rtlanal.h.
15852 If the return value is based on the assumption that signed overflow
15853 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15854 change *STRICT_OVERFLOW_P. */
15856 bool
15857 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15859 bool sub_strict_overflow_p;
15860 switch (TREE_CODE (t))
15862 case INTEGER_CST:
15863 return !integer_zerop (t);
15865 case ADDR_EXPR:
15867 tree base = TREE_OPERAND (t, 0);
15868 if (!DECL_P (base))
15869 base = get_base_address (base);
15871 if (!base)
15872 return false;
15874 /* Weak declarations may link to NULL. Other declarations may also end
15875 up at address zero, so only assume a nonzero address under
15876 -fdelete-null-pointer-checks; variables allocated on the stack, however, are always nonzero. */
15877 if (DECL_P (base)
15878 && (flag_delete_null_pointer_checks
15879 || (DECL_CONTEXT (base)
15880 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15881 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15882 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15884 /* Constants are never weak. */
15885 if (CONSTANT_CLASS_P (base))
15886 return true;
15888 return false;
15891 case COND_EXPR:
15892 sub_strict_overflow_p = false;
15893 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15894 &sub_strict_overflow_p)
15895 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15896 &sub_strict_overflow_p))
15898 if (sub_strict_overflow_p)
15899 *strict_overflow_p = true;
15900 return true;
15902 break;
15904 default:
15905 break;
15907 return false;
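/* Illustrative sketch only (not part of fold-const.c; uses the GNU weak
   attribute): the ADDR_EXPR case above refuses to treat the address of a
   DECL_WEAK declaration as nonzero, because an undefined weak reference may
   resolve to address 0, so the test below is not foldable to "true".  */
#if 0
#include <stdio.h>

extern int optional_feature __attribute__ ((weak));

int
main (void)
{
  if (&optional_feature != 0)   /* May legitimately be false at run time.  */
    printf ("feature present: %d\n", optional_feature);
  else
    printf ("feature absent\n");
  return 0;
}
#endif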
15910 /* Return true when T is known to be nonzero. Only integral and
15911 pointer types are handled; anything else conservatively yields false.
15912 Similar logic is present in nonzero_address in rtlanal.h.
15914 If the return value is based on the assumption that signed overflow
15915 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15916 change *STRICT_OVERFLOW_P. */
15918 bool
15919 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15921 tree type = TREE_TYPE (t);
15922 enum tree_code code;
15924 /* Doing something useful for floating point would need more work. */
15925 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15926 return false;
15928 code = TREE_CODE (t);
15929 switch (TREE_CODE_CLASS (code))
15931 case tcc_unary:
15932 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15933 strict_overflow_p);
15934 case tcc_binary:
15935 case tcc_comparison:
15936 return tree_binary_nonzero_warnv_p (code, type,
15937 TREE_OPERAND (t, 0),
15938 TREE_OPERAND (t, 1),
15939 strict_overflow_p);
15940 case tcc_constant:
15941 case tcc_declaration:
15942 case tcc_reference:
15943 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15945 default:
15946 break;
15949 switch (code)
15951 case TRUTH_NOT_EXPR:
15952 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15953 strict_overflow_p);
15955 case TRUTH_AND_EXPR:
15956 case TRUTH_OR_EXPR:
15957 case TRUTH_XOR_EXPR:
15958 return tree_binary_nonzero_warnv_p (code, type,
15959 TREE_OPERAND (t, 0),
15960 TREE_OPERAND (t, 1),
15961 strict_overflow_p);
15963 case COND_EXPR:
15964 case CONSTRUCTOR:
15965 case OBJ_TYPE_REF:
15966 case ASSERT_EXPR:
15967 case ADDR_EXPR:
15968 case WITH_SIZE_EXPR:
15969 case SSA_NAME:
15970 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15972 case COMPOUND_EXPR:
15973 case MODIFY_EXPR:
15974 case BIND_EXPR:
15975 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15976 strict_overflow_p);
15978 case SAVE_EXPR:
15979 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15980 strict_overflow_p);
15982 case CALL_EXPR:
15983 return alloca_call_p (t);
15985 default:
15986 break;
15988 return false;
15991 /* Return true when T is known to be nonzero. Handle warnings about
15992 undefined signed overflow. */
15994 bool
15995 tree_expr_nonzero_p (tree t)
15997 bool ret, strict_overflow_p;
15999 strict_overflow_p = false;
16000 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
16001 if (strict_overflow_p)
16002 fold_overflow_warning (("assuming signed overflow does not occur when "
16003 "determining that expression is always "
16004 "non-zero"),
16005 WARN_STRICT_OVERFLOW_MISC);
16006 return ret;
16009 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16010 attempt to fold the expression to a constant without modifying TYPE,
16011 OP0 or OP1.
16013 If the expression can be simplified to a constant, return
16014 the constant. If the expression cannot be simplified to a
16015 constant, return NULL_TREE. */
16017 tree
16018 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16020 tree tem = fold_binary (code, type, op0, op1);
16021 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16024 /* Given the components of a unary expression CODE, TYPE and OP0,
16025 attempt to fold the expression to a constant without modifying
16026 TYPE or OP0.
16028 If the expression can be simplified to a constant, return
16029 the constant. If the expression cannot be simplified to a
16030 constant, return NULL_TREE. */
16032 tree
16033 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16035 tree tem = fold_unary (code, type, op0);
16036 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
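/* Illustrative sketch only (a hypothetical caller, assuming the GCC internal
   headers already included by this file): folding 2 + 3 in 'int' with
   fold_binary_to_constant yields an INTEGER_CST, while a non-constant
   operand makes it return NULL_TREE.  */
#if 0
static void
example_fold_to_constant_usage (tree some_var)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);

  /* Both operands constant: returns the INTEGER_CST 5.  */
  tree five = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                       two, three);

  /* One operand non-constant: the fold does not produce a constant,
     so NULL_TREE is returned.  */
  tree maybe = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                        two, some_var);

  gcc_assert (five != NULL_TREE && maybe == NULL_TREE);
}
#endif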
16039 /* If EXP represents referencing an element in a constant string
16040 (either via pointer arithmetic or array indexing), return the
16041 tree representing the value accessed, otherwise return NULL. */
16043 tree
16044 fold_read_from_constant_string (tree exp)
16046 if ((TREE_CODE (exp) == INDIRECT_REF
16047 || TREE_CODE (exp) == ARRAY_REF)
16048 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16050 tree exp1 = TREE_OPERAND (exp, 0);
16051 tree index;
16052 tree string;
16053 location_t loc = EXPR_LOCATION (exp);
16055 if (TREE_CODE (exp) == INDIRECT_REF)
16056 string = string_constant (exp1, &index);
16057 else
16059 tree low_bound = array_ref_low_bound (exp);
16060 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16062 /* Optimize the special-case of a zero lower bound.
16064 We convert the low_bound to sizetype to avoid some problems
16065 with constant folding. (E.g. suppose the lower bound is 1,
16066 and its mode is QI. Without the conversion, (ARRAY
16067 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16068 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16069 if (! integer_zerop (low_bound))
16070 index = size_diffop_loc (loc, index,
16071 fold_convert_loc (loc, sizetype, low_bound));
16073 string = exp1;
16076 if (string
16077 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16078 && TREE_CODE (string) == STRING_CST
16079 && TREE_CODE (index) == INTEGER_CST
16080 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16081 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16082 == MODE_INT)
16083 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16084 return build_int_cst_type (TREE_TYPE (exp),
16085 (TREE_STRING_POINTER (string)
16086 [TREE_INT_CST_LOW (index)]));
16088 return NULL;
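/* Illustrative sketch only (not part of fold-const.c): the comment above
   about converting LOW_BOUND to sizetype guards against narrow unsigned
   wrap-around.  Subtracting 1 in an 8-bit unsigned type yields 255 rather
   than -1, so the bias must be applied in a sufficiently wide type.  */
#if 0
#include <assert.h>
#include <stddef.h>

int
main (void)
{
  unsigned char low_bound = 1;
  unsigned char index = 0;

  /* Folding the bias in the narrow type produces a bogus offset of 255.  */
  unsigned char biased_narrow = (unsigned char) (index - low_bound);
  assert (biased_narrow == 255);

  /* Doing the arithmetic in a wide, sizetype-like type keeps the intended
     value of -1 (modulo the wide type), which later cancels correctly.  */
  size_t biased_wide = (size_t) index - (size_t) low_bound;
  assert (biased_wide == (size_t) -1);
  return 0;
}
#endif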
16091 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16092 an integer constant, real, or fixed-point constant.
16094 TYPE is the type of the result. */
16096 static tree
16097 fold_negate_const (tree arg0, tree type)
16099 tree t = NULL_TREE;
16101 switch (TREE_CODE (arg0))
16103 case INTEGER_CST:
16105 double_int val = tree_to_double_int (arg0);
16106 bool overflow;
16107 val = val.neg_with_overflow (&overflow);
16108 t = force_fit_type_double (type, val, 1,
16109 (overflow | TREE_OVERFLOW (arg0))
16110 && !TYPE_UNSIGNED (type));
16111 break;
16114 case REAL_CST:
16115 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16116 break;
16118 case FIXED_CST:
16120 FIXED_VALUE_TYPE f;
16121 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16122 &(TREE_FIXED_CST (arg0)), NULL,
16123 TYPE_SATURATING (type));
16124 t = build_fixed (type, f);
16125 /* Propagate overflow flags. */
16126 if (overflow_p | TREE_OVERFLOW (arg0))
16127 TREE_OVERFLOW (t) = 1;
16128 break;
16131 default:
16132 gcc_unreachable ();
16135 return t;
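/* Illustrative sketch only (not part of fold-const.c; example_* is a
   hypothetical name): the INTEGER_CST case above negates the value and
   records whether the negation overflowed.  In two's complement the only
   input whose negation overflows is the most negative value, and the
   wrapping result then equals the input itself.  */
#if 0
#include <assert.h>
#include <limits.h>
#include <stdbool.h>

static long
example_checked_negate (long v, bool *overflow)
{
  *overflow = (v == LONG_MIN);    /* -LONG_MIN is not representable.  */
  return *overflow ? v : -v;
}

int
main (void)
{
  bool ovf;
  assert (example_checked_negate (42, &ovf) == -42 && !ovf);
  example_checked_negate (LONG_MIN, &ovf);
  assert (ovf);
  return 0;
}
#endif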
16138 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16139 an integer constant or real constant.
16141 TYPE is the type of the result. */
16143 tree
16144 fold_abs_const (tree arg0, tree type)
16146 tree t = NULL_TREE;
16148 switch (TREE_CODE (arg0))
16150 case INTEGER_CST:
16152 double_int val = tree_to_double_int (arg0);
16154 /* If the value is unsigned or non-negative, then the absolute value
16155 is the same as the ordinary value. */
16156 if (TYPE_UNSIGNED (type)
16157 || !val.is_negative ())
16158 t = arg0;
16160 /* If the value is negative, then the absolute value is
16161 its negation. */
16162 else
16164 bool overflow;
16165 val = val.neg_with_overflow (&overflow);
16166 t = force_fit_type_double (type, val, -1,
16167 overflow | TREE_OVERFLOW (arg0));
16170 break;
16172 case REAL_CST:
16173 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16174 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16175 else
16176 t = arg0;
16177 break;
16179 default:
16180 gcc_unreachable ();
16183 return t;
16186 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16187 constant. TYPE is the type of the result. */
16189 static tree
16190 fold_not_const (const_tree arg0, tree type)
16192 double_int val;
16194 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16196 val = ~tree_to_double_int (arg0);
16197 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16200 /* Given CODE, a relational operator, the target type, TYPE and two
16201 constant operands OP0 and OP1, return the result of the
16202 relational operation. If the result is not a compile time
16203 constant, then return NULL_TREE. */
16205 static tree
16206 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16208 int result, invert;
16210 /* From here on, the only cases we handle are when the result is
16211 known to be a constant. */
16213 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16215 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16216 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16218 /* Handle the cases where either operand is a NaN. */
16219 if (real_isnan (c0) || real_isnan (c1))
16221 switch (code)
16223 case EQ_EXPR:
16224 case ORDERED_EXPR:
16225 result = 0;
16226 break;
16228 case NE_EXPR:
16229 case UNORDERED_EXPR:
16230 case UNLT_EXPR:
16231 case UNLE_EXPR:
16232 case UNGT_EXPR:
16233 case UNGE_EXPR:
16234 case UNEQ_EXPR:
16235 result = 1;
16236 break;
16238 case LT_EXPR:
16239 case LE_EXPR:
16240 case GT_EXPR:
16241 case GE_EXPR:
16242 case LTGT_EXPR:
16243 if (flag_trapping_math)
16244 return NULL_TREE;
16245 result = 0;
16246 break;
16248 default:
16249 gcc_unreachable ();
16252 return constant_boolean_node (result, type);
16255 return constant_boolean_node (real_compare (code, c0, c1), type);
16258 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16260 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16261 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16262 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16265 /* Handle equality/inequality of complex constants. */
16266 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16268 tree rcond = fold_relational_const (code, type,
16269 TREE_REALPART (op0),
16270 TREE_REALPART (op1));
16271 tree icond = fold_relational_const (code, type,
16272 TREE_IMAGPART (op0),
16273 TREE_IMAGPART (op1));
16274 if (code == EQ_EXPR)
16275 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16276 else if (code == NE_EXPR)
16277 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16278 else
16279 return NULL_TREE;
16282 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16284 unsigned count = VECTOR_CST_NELTS (op0);
16285 tree *elts = XALLOCAVEC (tree, count);
16286 gcc_assert (VECTOR_CST_NELTS (op1) == count
16287 && TYPE_VECTOR_SUBPARTS (type) == count);
16289 for (unsigned i = 0; i < count; i++)
16291 tree elem_type = TREE_TYPE (type);
16292 tree elem0 = VECTOR_CST_ELT (op0, i);
16293 tree elem1 = VECTOR_CST_ELT (op1, i);
16295 tree tem = fold_relational_const (code, elem_type,
16296 elem0, elem1);
16298 if (tem == NULL_TREE)
16299 return NULL_TREE;
16301 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16304 return build_vector (type, elts);
16307 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16309 To compute GT, swap the arguments and do LT.
16310 To compute GE, do LT and invert the result.
16311 To compute LE, swap the arguments, do LT and invert the result.
16312 To compute NE, do EQ and invert the result.
16314 Therefore, the code below must handle only EQ and LT. */
16316 if (code == LE_EXPR || code == GT_EXPR)
16318 tree tem = op0;
16319 op0 = op1;
16320 op1 = tem;
16321 code = swap_tree_comparison (code);
16324 /* Note that it is safe to invert for real values here because we
16325 have already handled the one case where it matters (NaN operands). */
16327 invert = 0;
16328 if (code == NE_EXPR || code == GE_EXPR)
16330 invert = 1;
16331 code = invert_tree_comparison (code, false);
16334 /* Compute a result for LT or EQ if the arguments permit;
16335 otherwise return NULL_TREE. */
16336 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16338 if (code == EQ_EXPR)
16339 result = tree_int_cst_equal (op0, op1);
16340 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16341 result = INT_CST_LT_UNSIGNED (op0, op1);
16342 else
16343 result = INT_CST_LT (op0, op1);
16345 else
16346 return NULL_TREE;
16348 if (invert)
16349 result ^= 1;
16350 return constant_boolean_node (result, type);
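/* Illustrative sketch only (not part of fold-const.c; example_* names are
   hypothetical): the canonicalization described above reduces all six
   comparisons to EQ and LT, swapping the operands for LE/GT and inverting
   the result for NE/GE.  */
#if 0
#include <assert.h>
#include <stdbool.h>

enum cmp { CMP_EQ, CMP_NE, CMP_LT, CMP_LE, CMP_GT, CMP_GE };

static bool
example_compare (enum cmp code, long a, long b)
{
  bool invert = false;

  /* LE and GT become GE and LT after swapping the operands.  */
  if (code == CMP_LE || code == CMP_GT)
    {
      long tmp = a;
      a = b;
      b = tmp;
      code = (code == CMP_LE ? CMP_GE : CMP_LT);
    }

  /* NE and GE become EQ and LT after arranging to invert the result.  */
  if (code == CMP_NE || code == CMP_GE)
    {
      invert = true;
      code = (code == CMP_NE ? CMP_EQ : CMP_LT);
    }

  bool result = (code == CMP_EQ ? a == b : a < b);
  return invert ? !result : result;
}

int
main (void)
{
  assert (example_compare (CMP_LE, 2, 3) && example_compare (CMP_LE, 3, 3));
  assert (example_compare (CMP_GT, 4, 3) && !example_compare (CMP_GT, 3, 3));
  assert (example_compare (CMP_GE, 3, 3) && !example_compare (CMP_NE, 3, 3));
  return 0;
}
#endif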
16353 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16354 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16355 itself. */
16357 tree
16358 fold_build_cleanup_point_expr (tree type, tree expr)
16360 /* If the expression does not have side effects then we don't have to wrap
16361 it with a cleanup point expression. */
16362 if (!TREE_SIDE_EFFECTS (expr))
16363 return expr;
16365 /* If the expression is a return, check whether the expression inside the
16366 return, or the right hand side of the modify expression inside the
16367 return, has side effects. If either of them has none, we don't need to
16368 wrap the expression in a cleanup point expression. Note we don't check the
16369 left hand side of the modify because it should always be a return decl. */
16370 if (TREE_CODE (expr) == RETURN_EXPR)
16372 tree op = TREE_OPERAND (expr, 0);
16373 if (!op || !TREE_SIDE_EFFECTS (op))
16374 return expr;
16375 op = TREE_OPERAND (op, 1);
16376 if (!TREE_SIDE_EFFECTS (op))
16377 return expr;
16380 return build1 (CLEANUP_POINT_EXPR, type, expr);
16383 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16384 of an indirection through OP0, or NULL_TREE if no simplification is
16385 possible. */
16387 tree
16388 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16390 tree sub = op0;
16391 tree subtype;
16393 STRIP_NOPS (sub);
16394 subtype = TREE_TYPE (sub);
16395 if (!POINTER_TYPE_P (subtype))
16396 return NULL_TREE;
16398 if (TREE_CODE (sub) == ADDR_EXPR)
16400 tree op = TREE_OPERAND (sub, 0);
16401 tree optype = TREE_TYPE (op);
16402 /* *&CONST_DECL -> the value of the const decl. */
16403 if (TREE_CODE (op) == CONST_DECL)
16404 return DECL_INITIAL (op);
16405 /* *&p => p; make sure to handle *&"str"[cst] here. */
16406 if (type == optype)
16408 tree fop = fold_read_from_constant_string (op);
16409 if (fop)
16410 return fop;
16411 else
16412 return op;
16414 /* *(foo *)&fooarray => fooarray[0] */
16415 else if (TREE_CODE (optype) == ARRAY_TYPE
16416 && type == TREE_TYPE (optype)
16417 && (!in_gimple_form
16418 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16420 tree type_domain = TYPE_DOMAIN (optype);
16421 tree min_val = size_zero_node;
16422 if (type_domain && TYPE_MIN_VALUE (type_domain))
16423 min_val = TYPE_MIN_VALUE (type_domain);
16424 if (in_gimple_form
16425 && TREE_CODE (min_val) != INTEGER_CST)
16426 return NULL_TREE;
16427 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16428 NULL_TREE, NULL_TREE);
16430 /* *(foo *)&complexfoo => __real__ complexfoo */
16431 else if (TREE_CODE (optype) == COMPLEX_TYPE
16432 && type == TREE_TYPE (optype))
16433 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16434 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16435 else if (TREE_CODE (optype) == VECTOR_TYPE
16436 && type == TREE_TYPE (optype))
16438 tree part_width = TYPE_SIZE (type);
16439 tree index = bitsize_int (0);
16440 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16444 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16445 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16447 tree op00 = TREE_OPERAND (sub, 0);
16448 tree op01 = TREE_OPERAND (sub, 1);
16450 STRIP_NOPS (op00);
16451 if (TREE_CODE (op00) == ADDR_EXPR)
16453 tree op00type;
16454 op00 = TREE_OPERAND (op00, 0);
16455 op00type = TREE_TYPE (op00);
16457 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16458 if (TREE_CODE (op00type) == VECTOR_TYPE
16459 && type == TREE_TYPE (op00type))
16461 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16462 tree part_width = TYPE_SIZE (type);
16463 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16464 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16465 tree index = bitsize_int (indexi);
16467 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16468 return fold_build3_loc (loc,
16469 BIT_FIELD_REF, type, op00,
16470 part_width, index);
16473 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16474 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16475 && type == TREE_TYPE (op00type))
16477 tree size = TYPE_SIZE_UNIT (type);
16478 if (tree_int_cst_equal (size, op01))
16479 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16481 /* ((foo *)&fooarray)[1] => fooarray[1] */
16482 else if (TREE_CODE (op00type) == ARRAY_TYPE
16483 && type == TREE_TYPE (op00type))
16485 tree type_domain = TYPE_DOMAIN (op00type);
16486 tree min_val = size_zero_node;
16487 if (type_domain && TYPE_MIN_VALUE (type_domain))
16488 min_val = TYPE_MIN_VALUE (type_domain);
16489 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16490 TYPE_SIZE_UNIT (type));
16491 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16492 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16493 NULL_TREE, NULL_TREE);
16498 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16499 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16500 && type == TREE_TYPE (TREE_TYPE (subtype))
16501 && (!in_gimple_form
16502 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16504 tree type_domain;
16505 tree min_val = size_zero_node;
16506 sub = build_fold_indirect_ref_loc (loc, sub);
16507 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16508 if (type_domain && TYPE_MIN_VALUE (type_domain))
16509 min_val = TYPE_MIN_VALUE (type_domain);
16510 if (in_gimple_form
16511 && TREE_CODE (min_val) != INTEGER_CST)
16512 return NULL_TREE;
16513 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16514 NULL_TREE);
16517 return NULL_TREE;
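/* Illustrative sketch only (not part of fold-const.c): rewrites such as
   *(foo *)&fooarray => fooarray[0] and *(foo *)&complexfoo =>
   __real__ complexfoo rest on layout guarantees that can be observed at the
   source level: an array starts at its first element, and a complex value
   is laid out as { real part, imaginary part }.  */
#if 0
#include <assert.h>
#include <complex.h>

int
main (void)
{
  int fooarray[4] = { 10, 20, 30, 40 };
  double _Complex z = 1.5 + 2.5 * I;

  /* *(int *)&fooarray designates the same object as fooarray[0].  */
  assert (*(int *) &fooarray == fooarray[0]);

  /* The first double at &z is the real part, the second the imaginary
     part; this is what the REALPART_EXPR/IMAGPART_EXPR rewrites exploit.  */
  assert (*(double *) &z == creal (z));
  assert (*((double *) &z + 1) == cimag (z));
  return 0;
}
#endif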
16520 /* Builds an expression for an indirection through T, simplifying some
16521 cases. */
16523 tree
16524 build_fold_indirect_ref_loc (location_t loc, tree t)
16526 tree type = TREE_TYPE (TREE_TYPE (t));
16527 tree sub = fold_indirect_ref_1 (loc, type, t);
16529 if (sub)
16530 return sub;
16532 return build1_loc (loc, INDIRECT_REF, type, t);
16535 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16537 tree
16538 fold_indirect_ref_loc (location_t loc, tree t)
16540 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16542 if (sub)
16543 return sub;
16544 else
16545 return t;
16548 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16549 whose result is ignored. The type of the returned tree need not be
16550 the same as the original expression. */
16552 tree
16553 fold_ignored_result (tree t)
16555 if (!TREE_SIDE_EFFECTS (t))
16556 return integer_zero_node;
16558 for (;;)
16559 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16561 case tcc_unary:
16562 t = TREE_OPERAND (t, 0);
16563 break;
16565 case tcc_binary:
16566 case tcc_comparison:
16567 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16568 t = TREE_OPERAND (t, 0);
16569 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16570 t = TREE_OPERAND (t, 1);
16571 else
16572 return t;
16573 break;
16575 case tcc_expression:
16576 switch (TREE_CODE (t))
16578 case COMPOUND_EXPR:
16579 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16580 return t;
16581 t = TREE_OPERAND (t, 0);
16582 break;
16584 case COND_EXPR:
16585 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16586 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16587 return t;
16588 t = TREE_OPERAND (t, 0);
16589 break;
16591 default:
16592 return t;
16594 break;
16596 default:
16597 return t;
16601 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16602 This can only be applied to objects of a sizetype. */
16604 tree
16605 round_up_loc (location_t loc, tree value, int divisor)
16607 tree div = NULL_TREE;
16609 gcc_assert (divisor > 0);
16610 if (divisor == 1)
16611 return value;
16613 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16614 have to do anything. Only perform this check when VALUE is not a
16615 constant, because for constants the check is more expensive than
16616 simply doing the rounding. */
16617 if (TREE_CODE (value) != INTEGER_CST)
16619 div = build_int_cst (TREE_TYPE (value), divisor);
16621 if (multiple_of_p (TREE_TYPE (value), value, div))
16622 return value;
16625 /* If divisor is a power of two, simplify this to bit manipulation. */
16626 if (divisor == (divisor & -divisor))
16628 if (TREE_CODE (value) == INTEGER_CST)
16630 double_int val = tree_to_double_int (value);
16631 bool overflow_p;
16633 if ((val.low & (divisor - 1)) == 0)
16634 return value;
16636 overflow_p = TREE_OVERFLOW (value);
16637 val.low &= ~(divisor - 1);
16638 val.low += divisor;
16639 if (val.low == 0)
16641 val.high++;
16642 if (val.high == 0)
16643 overflow_p = true;
16646 return force_fit_type_double (TREE_TYPE (value), val,
16647 -1, overflow_p);
16649 else
16651 tree t;
16653 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16654 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16655 t = build_int_cst (TREE_TYPE (value), -divisor);
16656 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16659 else
16661 if (!div)
16662 div = build_int_cst (TREE_TYPE (value), divisor);
16663 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16664 value = size_binop_loc (loc, MULT_EXPR, value, div);
16667 return value;
16670 /* Likewise, but round down. */
16672 tree
16673 round_down_loc (location_t loc, tree value, int divisor)
16675 tree div = NULL_TREE;
16677 gcc_assert (divisor > 0);
16678 if (divisor == 1)
16679 return value;
16681 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16682 have to do anything. Only perform this check when VALUE is not a
16683 constant, because for constants the check is more expensive than
16684 simply doing the rounding. */
16685 if (TREE_CODE (value) != INTEGER_CST)
16687 div = build_int_cst (TREE_TYPE (value), divisor);
16689 if (multiple_of_p (TREE_TYPE (value), value, div))
16690 return value;
16693 /* If divisor is a power of two, simplify this to bit manipulation. */
16694 if (divisor == (divisor & -divisor))
16696 tree t;
16698 t = build_int_cst (TREE_TYPE (value), -divisor);
16699 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16701 else
16703 if (!div)
16704 div = build_int_cst (TREE_TYPE (value), divisor);
16705 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16706 value = size_binop_loc (loc, MULT_EXPR, value, div);
16709 return value;
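/* Illustrative sketch only (not part of fold-const.c; example_* names are
   hypothetical): for a power-of-two divisor D the two routines above reduce
   to bit manipulation, rounding up via (V + D - 1) & -D and rounding down
   via V & -D, since -D == ~(D - 1) when D is a power of two.  */
#if 0
#include <assert.h>

static unsigned long
example_round_up_pow2 (unsigned long v, unsigned long d)
{
  return (v + d - 1) & -d;
}

static unsigned long
example_round_down_pow2 (unsigned long v, unsigned long d)
{
  return v & -d;
}

int
main (void)
{
  assert (example_round_up_pow2 (37, 8) == 40);
  assert (example_round_up_pow2 (40, 8) == 40);   /* Already a multiple.  */
  assert (example_round_down_pow2 (37, 8) == 32);
  return 0;
}
#endif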
16712 /* Returns the pointer to the base of the object addressed by EXP and
16713 extracts the information about the offset of the access, storing it
16714 in *PBITPOS and *POFFSET. */
16716 static tree
16717 split_address_to_core_and_offset (tree exp,
16718 HOST_WIDE_INT *pbitpos, tree *poffset)
16720 tree core;
16721 enum machine_mode mode;
16722 int unsignedp, volatilep;
16723 HOST_WIDE_INT bitsize;
16724 location_t loc = EXPR_LOCATION (exp);
16726 if (TREE_CODE (exp) == ADDR_EXPR)
16728 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16729 poffset, &mode, &unsignedp, &volatilep,
16730 false);
16731 core = build_fold_addr_expr_loc (loc, core);
16733 else
16735 core = exp;
16736 *pbitpos = 0;
16737 *poffset = NULL_TREE;
16740 return core;
16743 /* Returns true if addresses of E1 and E2 differ by a constant, false
16744 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16746 bool
16747 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16749 tree core1, core2;
16750 HOST_WIDE_INT bitpos1, bitpos2;
16751 tree toffset1, toffset2, tdiff, type;
16753 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16754 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16756 if (bitpos1 % BITS_PER_UNIT != 0
16757 || bitpos2 % BITS_PER_UNIT != 0
16758 || !operand_equal_p (core1, core2, 0))
16759 return false;
16761 if (toffset1 && toffset2)
16763 type = TREE_TYPE (toffset1);
16764 if (type != TREE_TYPE (toffset2))
16765 toffset2 = fold_convert (type, toffset2);
16767 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16768 if (!cst_and_fits_in_hwi (tdiff))
16769 return false;
16771 *diff = int_cst_value (tdiff);
16773 else if (toffset1 || toffset2)
16775 /* If only one of the offsets is non-constant, the difference cannot
16776 be a constant. */
16777 return false;
16779 else
16780 *diff = 0;
16782 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16783 return true;
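/* Illustrative sketch only (not part of fold-const.c): ptr_difference_const
   succeeds exactly when both addresses decompose into the same core object
   plus offsets whose difference is a compile-time constant, as in the
   source-level analogue below.  */
#if 0
#include <assert.h>
#include <stddef.h>

struct pair { int a; char pad[12]; int b; };

int
main (void)
{
  struct pair p;
  int arr[10];

  /* Same core (&p), constant member offsets: the difference is constant.  */
  ptrdiff_t d1 = (char *) &p.b - (char *) &p.a;
  assert (d1 == (ptrdiff_t) offsetof (struct pair, b));

  /* Same core (arr), offsets 7 and 2 elements: difference of 5 elements.  */
  ptrdiff_t d2 = (char *) &arr[7] - (char *) &arr[2];
  assert (d2 == 5 * (ptrdiff_t) sizeof (int));
  return 0;
}
#endif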
16786 /* Simplify the floating point expression EXP when the sign of the
16787 result is not significant. Return NULL_TREE if no simplification
16788 is possible. */
16790 tree
16791 fold_strip_sign_ops (tree exp)
16793 tree arg0, arg1;
16794 location_t loc = EXPR_LOCATION (exp);
16796 switch (TREE_CODE (exp))
16798 case ABS_EXPR:
16799 case NEGATE_EXPR:
16800 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16801 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16803 case MULT_EXPR:
16804 case RDIV_EXPR:
16805 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16806 return NULL_TREE;
16807 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16808 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16809 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16810 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16811 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16812 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16813 break;
16815 case COMPOUND_EXPR:
16816 arg0 = TREE_OPERAND (exp, 0);
16817 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16818 if (arg1)
16819 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16820 break;
16822 case COND_EXPR:
16823 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16824 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16825 if (arg0 || arg1)
16826 return fold_build3_loc (loc,
16827 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16828 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16829 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16830 break;
16832 case CALL_EXPR:
16834 const enum built_in_function fcode = builtin_mathfn_code (exp);
16835 switch (fcode)
16837 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16838 /* Strip the copysign call and return its first argument. */
16839 arg0 = CALL_EXPR_ARG (exp, 0);
16840 arg1 = CALL_EXPR_ARG (exp, 1);
16841 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16843 default:
16844 /* Strip sign ops from the argument of "odd" math functions. */
16845 if (negate_mathfn_p (fcode))
16847 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16848 if (arg0)
16849 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16851 break;
16854 break;
16856 default:
16857 break;
16859 return NULL_TREE;
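/* Illustrative sketch only (not part of fold-const.c): fold_strip_sign_ops
   may drop NEGATE_EXPR, ABS_EXPR and copysign calls when the sign of the
   result is irrelevant.  The identities it relies on can be checked
   directly, e.g. |(-x) * y| == |x * y| and |copysign (x, y)| == |x|.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = -3.5, y = 2.0;

  assert (fabs (-x * y) == fabs (x * y));
  assert (fabs (fabs (x) * y) == fabs (x * y));
  assert (fabs (copysign (x, y)) == fabs (x));
  return 0;
}
#endif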