/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
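
/* Illustrative example (not part of the original source): since
   COMPCODE_LT is 1, COMPCODE_EQ is 2 and COMPCODE_LE is 3, oring the
   codes for "<" and "==" yields exactly the code for "<=", and
   COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ.  This is what makes AND
   and OR of relational comparisons simple bit operations on the
   encoding.  */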

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
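
/* Illustrative example (not part of the original source): with
   CODE == TRUNC_DIV_EXPR, dividing the constant 12 by 4 leaves a zero
   remainder and returns the constant 3, whereas dividing 13 by 4
   leaves remainder 1 and returns NULL_TREE.  */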

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
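
/* A minimal usage sketch of the deferral machinery (illustrative only;
   EXPR and STMT are hypothetical, not part of this file):

     fold_defer_overflow_warnings ();
     tem = fold (expr);
     fold_undefer_overflow_warnings (tem != NULL_TREE
                                     && !TREE_OVERFLOW (tem), stmt, 0);

   Any warning recorded by fold_overflow_warning above is thus emitted
   only if the caller decides the folded result was actually used.  */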

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
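
/* Illustrative example (not part of the original source): for a signed
   8-bit type the only constant whose negation overflows is -128, the
   value with just the sign bit set; the final comparison above returns
   false exactly for that bit pattern and true for every other
   constant.  */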

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
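
/* Illustrative example (not part of the original source): for
   "x - 2.0", when neither signed zeros nor sign-dependent rounding are
   honored (e.g. under -ffast-math), the MINUS_EXPR case above returns
   true, and fold_negate_expr below rewrites -(x - 2.0) as 2.0 - x.  */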

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except when it is a
   literal, in which case we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
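
/* Illustrative example (not part of the original source): splitting
   IN = "x - 5" with CODE == PLUS_EXPR takes the MINUS_EXPR branch and
   yields *LITP = 0, *MINUS_LITP = 5, *CONP = 0 with "x" returned as
   the variable part; splitting "p + 3" returns "p" with *LITP = 3.  */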

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
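
/* Illustrative example (not part of the original source): with
   CODE == PLUS_EXPR, T1 == "-a" (a NEGATE_EXPR) and T2 == "b + c"
   (a PLUS_EXPR), the NEGATE_EXPR handling above builds "(b + c) - a"
   with build2_loc rather than fold_build2_loc, avoiding re-folding
   the associated operands and the infinite recursion that could
   cause.  */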

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      /* ??? Need quad precision, or an additional shift operand
         to the multiply primitive, to handle very large highparts.  */
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        return NULL_TREE;
      tmp = op1 * op2;
      res = tmp.rshift (TYPE_PRECISION (type), TYPE_PRECISION (type), !uns);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
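
/* Illustrative example (not part of the original source): for
   CEIL_DIV_EXPR of 7 by 3 the shortcut above computes
   (7 + 3 - 1) / 3 == 3, while TRUNC_DIV_EXPR of 7 by 3 yields 2.
   Any signed overflow detected along the way, e.g. from PLUS_EXPR on
   two large constants, is passed to force_fit_type_double, which
   records it in TREE_OVERFLOW of the result.  */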

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = bi/br;
                     div = (bi * ratio) + br;
                     tr = (ai * ratio) + ar;
                     ti = ai - (ar * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
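
/* Illustrative example (not part of the original source): for two V4SI
   vector constants {1,2,3,4} and {5,6,7,8}, PLUS_EXPR is applied
   element by element in the loop above, yielding {6,8,10,12}; if any
   element pair cannot be folded, the whole vector fold is
   abandoned.  */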

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
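
/* Illustrative example (not part of the original source):
   size_binop (PLUS_EXPR, size_int (4), size_int (8)) folds directly to
   the sizetype constant 12 via int_const_binop_1, while multiplying a
   constant size by size_one_node returns the other operand unchanged
   through the integer_onep shortcut above.  */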

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
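
/* Illustrative example (not part of the original source): for sizetype
   constants ARG0 == 2 and ARG1 == 5, the final branch above computes
   0 - (ssizetype) (5 - 2) == -3, keeping the subtraction in the
   unsigned type where it cannot overflow before converting to the
   signed result type.  */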

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
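
/* Illustrative example (not part of the original source): converting
   the REAL_CST 3.9 to a 32-bit int via FIX_TRUNC_EXPR yields 3 with no
   overflow; converting 1e30 saturates to INT_MAX and a NaN becomes 0,
   both with TREE_OVERFLOW set on the resulting constant.  */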

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     Check whether a nonzero fraction was truncated away and, if so,
     add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
1986 fold_convert_exit:
1987 protected_set_expr_location_unshare (tem, loc);
1988 return tem;
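/* Illustrative sketch (added commentary, not part of the original file):
   the COMPLEX_TYPE -> REAL_TYPE arm above mirrors C99/C11 semantics, where
   converting a complex value to a real type keeps only the real part (the
   REALPART_EXPR built before the recursive call).  A minimal stand-alone
   demonstration, assuming a C99 host compiler; kept out of the build:  */
#if 0
#include <assert.h>
#include <complex.h>

int
main (void)
{
  double _Complex z = 3.0 + 4.0 * I;
  double r = z;			/* like fold_convert_loc: take the real
				   part, then convert.  */
  assert (r == 3.0);
  return 0;
}
#endif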
1991 /* Return false if expr can be assumed not to be an lvalue, true
1992 otherwise. */
1994 static bool
1995 maybe_lvalue_p (const_tree x)
1997 /* We only need to wrap lvalue tree codes. */
1998 switch (TREE_CODE (x))
2000 case VAR_DECL:
2001 case PARM_DECL:
2002 case RESULT_DECL:
2003 case LABEL_DECL:
2004 case FUNCTION_DECL:
2005 case SSA_NAME:
2007 case COMPONENT_REF:
2008 case MEM_REF:
2009 case INDIRECT_REF:
2010 case ARRAY_REF:
2011 case ARRAY_RANGE_REF:
2012 case BIT_FIELD_REF:
2013 case OBJ_TYPE_REF:
2015 case REALPART_EXPR:
2016 case IMAGPART_EXPR:
2017 case PREINCREMENT_EXPR:
2018 case PREDECREMENT_EXPR:
2019 case SAVE_EXPR:
2020 case TRY_CATCH_EXPR:
2021 case WITH_CLEANUP_EXPR:
2022 case COMPOUND_EXPR:
2023 case MODIFY_EXPR:
2024 case TARGET_EXPR:
2025 case COND_EXPR:
2026 case BIND_EXPR:
2027 break;
2029 default:
2030 /* Assume the worst for front-end tree codes. */
2031 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2032 break;
2033 return false;
2036 return true;
2039 /* Return an expr equal to X but certainly not valid as an lvalue. */
2041 tree
2042 non_lvalue_loc (location_t loc, tree x)
2044 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2045 us. */
2046 if (in_gimple_form)
2047 return x;
2049 if (! maybe_lvalue_p (x))
2050 return x;
2051 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2054 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2055 Zero means allow extended lvalues. */
2057 int pedantic_lvalues;
2059 /* When pedantic, return an expr equal to X but certainly not valid as a
2060 pedantic lvalue. Otherwise, return X. */
2062 static tree
2063 pedantic_non_lvalue_loc (location_t loc, tree x)
2065 if (pedantic_lvalues)
2066 return non_lvalue_loc (loc, x);
2068 return protected_set_expr_location_unshare (x, loc);
2071 /* Given a tree comparison code, return the code that is the logical inverse.
2072 It is generally not safe to do this for floating-point comparisons, except
2073 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2074 ERROR_MARK in this case. */
2076 enum tree_code
2077 invert_tree_comparison (enum tree_code code, bool honor_nans)
2079 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2080 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2081 return ERROR_MARK;
2083 switch (code)
2085 case EQ_EXPR:
2086 return NE_EXPR;
2087 case NE_EXPR:
2088 return EQ_EXPR;
2089 case GT_EXPR:
2090 return honor_nans ? UNLE_EXPR : LE_EXPR;
2091 case GE_EXPR:
2092 return honor_nans ? UNLT_EXPR : LT_EXPR;
2093 case LT_EXPR:
2094 return honor_nans ? UNGE_EXPR : GE_EXPR;
2095 case LE_EXPR:
2096 return honor_nans ? UNGT_EXPR : GT_EXPR;
2097 case LTGT_EXPR:
2098 return UNEQ_EXPR;
2099 case UNEQ_EXPR:
2100 return LTGT_EXPR;
2101 case UNGT_EXPR:
2102 return LE_EXPR;
2103 case UNGE_EXPR:
2104 return LT_EXPR;
2105 case UNLT_EXPR:
2106 return GE_EXPR;
2107 case UNLE_EXPR:
2108 return GT_EXPR;
2109 case ORDERED_EXPR:
2110 return UNORDERED_EXPR;
2111 case UNORDERED_EXPR:
2112 return ORDERED_EXPR;
2113 default:
2114 gcc_unreachable ();
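/* Illustrative sketch (added, not part of the original file): why LT_EXPR
   inverts to UNGE_EXPR when NaNs are honored.  With a NaN operand both
   x < y and x >= y are false, so plain GE_EXPR is not the logical inverse
   of LT_EXPR; only "unordered or greater or equal" (UNGE_EXPR) is.
   Assuming IEEE semantics on the host:  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double q = nan ("");		/* a quiet NaN */
  assert (!(q < 1.0));		/* the original comparison is false...  */
  assert (!(q >= 1.0));		/* ...but so is GE: both sides false,
				   hence GE cannot be the inverse.  */
  return 0;
}
#endif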
2118 /* Similar, but return the comparison that results if the operands are
2119 swapped. This is safe for floating-point. */
2121 enum tree_code
2122 swap_tree_comparison (enum tree_code code)
2124 switch (code)
2126 case EQ_EXPR:
2127 case NE_EXPR:
2128 case ORDERED_EXPR:
2129 case UNORDERED_EXPR:
2130 case LTGT_EXPR:
2131 case UNEQ_EXPR:
2132 return code;
2133 case GT_EXPR:
2134 return LT_EXPR;
2135 case GE_EXPR:
2136 return LE_EXPR;
2137 case LT_EXPR:
2138 return GT_EXPR;
2139 case LE_EXPR:
2140 return GE_EXPR;
2141 case UNGT_EXPR:
2142 return UNLT_EXPR;
2143 case UNGE_EXPR:
2144 return UNLE_EXPR;
2145 case UNLT_EXPR:
2146 return UNGT_EXPR;
2147 case UNLE_EXPR:
2148 return UNGE_EXPR;
2149 default:
2150 gcc_unreachable ();
2155 /* Convert a comparison tree code from an enum tree_code representation
2156 into a compcode bit-based encoding. This function is the inverse of
2157 compcode_to_comparison. */
2159 static enum comparison_code
2160 comparison_to_compcode (enum tree_code code)
2162 switch (code)
2164 case LT_EXPR:
2165 return COMPCODE_LT;
2166 case EQ_EXPR:
2167 return COMPCODE_EQ;
2168 case LE_EXPR:
2169 return COMPCODE_LE;
2170 case GT_EXPR:
2171 return COMPCODE_GT;
2172 case NE_EXPR:
2173 return COMPCODE_NE;
2174 case GE_EXPR:
2175 return COMPCODE_GE;
2176 case ORDERED_EXPR:
2177 return COMPCODE_ORD;
2178 case UNORDERED_EXPR:
2179 return COMPCODE_UNORD;
2180 case UNLT_EXPR:
2181 return COMPCODE_UNLT;
2182 case UNEQ_EXPR:
2183 return COMPCODE_UNEQ;
2184 case UNLE_EXPR:
2185 return COMPCODE_UNLE;
2186 case UNGT_EXPR:
2187 return COMPCODE_UNGT;
2188 case LTGT_EXPR:
2189 return COMPCODE_LTGT;
2190 case UNGE_EXPR:
2191 return COMPCODE_UNGE;
2192 default:
2193 gcc_unreachable ();
2197 /* Convert a compcode bit-based encoding of a comparison operator back
2198 to GCC's enum tree_code representation. This function is the
2199 inverse of comparison_to_compcode. */
2201 static enum tree_code
2202 compcode_to_comparison (enum comparison_code code)
2204 switch (code)
2206 case COMPCODE_LT:
2207 return LT_EXPR;
2208 case COMPCODE_EQ:
2209 return EQ_EXPR;
2210 case COMPCODE_LE:
2211 return LE_EXPR;
2212 case COMPCODE_GT:
2213 return GT_EXPR;
2214 case COMPCODE_NE:
2215 return NE_EXPR;
2216 case COMPCODE_GE:
2217 return GE_EXPR;
2218 case COMPCODE_ORD:
2219 return ORDERED_EXPR;
2220 case COMPCODE_UNORD:
2221 return UNORDERED_EXPR;
2222 case COMPCODE_UNLT:
2223 return UNLT_EXPR;
2224 case COMPCODE_UNEQ:
2225 return UNEQ_EXPR;
2226 case COMPCODE_UNLE:
2227 return UNLE_EXPR;
2228 case COMPCODE_UNGT:
2229 return UNGT_EXPR;
2230 case COMPCODE_LTGT:
2231 return LTGT_EXPR;
2232 case COMPCODE_UNGE:
2233 return UNGE_EXPR;
2234 default:
2235 gcc_unreachable ();
2239 /* Return a tree for the comparison which is the combination of
2240 doing the AND or OR (depending on CODE) of the two operations LCODE
2241 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2242 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2243 if this makes the transformation invalid. */
2245 tree
2246 combine_comparisons (location_t loc,
2247 enum tree_code code, enum tree_code lcode,
2248 enum tree_code rcode, tree truth_type,
2249 tree ll_arg, tree lr_arg)
2251 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2252 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2253 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2254 int compcode;
2256 switch (code)
2258 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2259 compcode = lcompcode & rcompcode;
2260 break;
2262 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2263 compcode = lcompcode | rcompcode;
2264 break;
2266 default:
2267 return NULL_TREE;
2270 if (!honor_nans)
2272 /* Eliminate unordered comparisons, as well as LTGT and ORD
2273 which are not used unless the mode has NaNs. */
2274 compcode &= ~COMPCODE_UNORD;
2275 if (compcode == COMPCODE_LTGT)
2276 compcode = COMPCODE_NE;
2277 else if (compcode == COMPCODE_ORD)
2278 compcode = COMPCODE_TRUE;
2280 else if (flag_trapping_math)
2282 /* Check that the original operation and the optimized ones will trap
2283 under the same condition. */
2284 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2285 && (lcompcode != COMPCODE_EQ)
2286 && (lcompcode != COMPCODE_ORD);
2287 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2288 && (rcompcode != COMPCODE_EQ)
2289 && (rcompcode != COMPCODE_ORD);
2290 bool trap = (compcode & COMPCODE_UNORD) == 0
2291 && (compcode != COMPCODE_EQ)
2292 && (compcode != COMPCODE_ORD);
2294 /* In a short-circuited boolean expression the LHS might be
2295 such that the RHS, if evaluated, will never trap. For
2296 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2297 if neither x nor y is NaN. (This is a mixed blessing: for
2298 example, the expression above will never trap, hence
2299 optimizing it to x < y would be invalid). */
2300 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2301 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2302 rtrap = false;
2304 /* If the comparison was short-circuited, and only the RHS
2305 trapped, we may now generate a spurious trap. */
2306 if (rtrap && !ltrap
2307 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2308 return NULL_TREE;
2310 /* If we changed the conditions that cause a trap, we lose. */
2311 if ((ltrap || rtrap) != trap)
2312 return NULL_TREE;
2315 if (compcode == COMPCODE_TRUE)
2316 return constant_boolean_node (true, truth_type);
2317 else if (compcode == COMPCODE_FALSE)
2318 return constant_boolean_node (false, truth_type);
2319 else
2321 enum tree_code tcode;
2323 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2324 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
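/* Illustrative sketch (added, not part of the original file): the compcode
   encoding turns logical combination of comparisons into plain bit
   arithmetic.  Using the enum comparison_code values defined earlier in
   this file:  */
#if 0
#include <assert.h>

int
main (void)
{
  /* (x < y) || (x == y)  ==>  OR the encodings: LT|EQ == LE.  */
  assert ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE);
  /* (x <= y) && (x >= y)  ==>  AND the encodings: LE&GE == EQ.  */
  assert ((COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ);
  return 0;
}
#endif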
2328 /* Return nonzero if two operands (typically of the same tree node)
2329 are necessarily equal. If either argument has side-effects this
2330 function returns zero. FLAGS modifies behavior as follows:
2332 If OEP_ONLY_CONST is set, only return nonzero for constants.
2333 This function tests whether the operands are indistinguishable;
2334 it does not test whether they are equal using C's == operation.
2335 The distinction is important for IEEE floating point, because
2336 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2337 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2339 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2340 even though it may hold multiple values during a function.
2341 This is because a GCC tree node guarantees that nothing else is
2342 executed between the evaluation of its "operands" (which may often
2343 be evaluated in arbitrary order). Hence if the operands themselves
2344 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2345 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2346 unset means assuming isochronic (or instantaneous) tree equivalence.
2347 Unless comparing arbitrary expression trees, such as from different
2348 statements, this flag can usually be left unset.
2350 If OEP_PURE_SAME is set, then pure functions with identical arguments
2351 are considered the same. It is used when the caller has other ways
2352 to ensure that global memory is unchanged in between. */
2355 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2357 /* If either is ERROR_MARK, they aren't equal. */
2358 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2359 || TREE_TYPE (arg0) == error_mark_node
2360 || TREE_TYPE (arg1) == error_mark_node)
2361 return 0;
2363 /* Similar, if either does not have a type (like a released SSA name),
2364 they aren't equal. */
2365 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2366 return 0;
2368 /* Check equality of integer constants before bailing out due to
2369 precision differences. */
2370 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2371 return tree_int_cst_equal (arg0, arg1);
2373 /* If both types don't have the same signedness, then we can't consider
2374 them equal. We must check this before the STRIP_NOPS calls
2375 because they may change the signedness of the arguments. As pointers
2376 strictly don't have a signedness, require either two pointers or
2377 two non-pointers as well. */
2378 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2379 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2380 return 0;
2382 /* We cannot consider pointers to different address spaces equal. */
2383 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2384 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2385 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2386 return 0;
2388 /* If both types don't have the same precision, then it is not safe
2389 to strip NOPs. */
2390 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2391 return 0;
2393 STRIP_NOPS (arg0);
2394 STRIP_NOPS (arg1);
2396 /* In case both args are comparisons but with different comparison
2397 code, try to swap the comparison operands of one arg to produce
2398 a match and compare that variant. */
2399 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2400 && COMPARISON_CLASS_P (arg0)
2401 && COMPARISON_CLASS_P (arg1))
2403 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2405 if (TREE_CODE (arg0) == swap_code)
2406 return operand_equal_p (TREE_OPERAND (arg0, 0),
2407 TREE_OPERAND (arg1, 1), flags)
2408 && operand_equal_p (TREE_OPERAND (arg0, 1),
2409 TREE_OPERAND (arg1, 0), flags);
2412 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2413 /* This is needed for conversions and for COMPONENT_REF.
2414 Might as well play it safe and always test this. */
2415 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2416 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2417 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2418 return 0;
2420 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2421 We don't care about side effects in that case because the SAVE_EXPR
2422 takes care of that for us. In all other cases, two expressions are
2423 equal if they have no side effects. If we have two identical
2424 expressions with side effects that should be treated the same due
2425 to the only side effects being identical SAVE_EXPR's, that will
2426 be detected in the recursive calls below.
2427 If we are taking an invariant address of two identical objects
2428 they are necessarily equal as well. */
2429 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2430 && (TREE_CODE (arg0) == SAVE_EXPR
2431 || (flags & OEP_CONSTANT_ADDRESS_OF)
2432 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2433 return 1;
2435 /* Next handle constant cases, those for which we can return 1 even
2436 if ONLY_CONST is set. */
2437 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2438 switch (TREE_CODE (arg0))
2440 case INTEGER_CST:
2441 return tree_int_cst_equal (arg0, arg1);
2443 case FIXED_CST:
2444 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2445 TREE_FIXED_CST (arg1));
2447 case REAL_CST:
2448 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2449 TREE_REAL_CST (arg1)))
2450 return 1;
2453 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2455 /* If we do not distinguish between signed and unsigned zero,
2456 consider them equal. */
2457 if (real_zerop (arg0) && real_zerop (arg1))
2458 return 1;
2460 return 0;
2462 case VECTOR_CST:
2464 unsigned i;
2466 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2467 return 0;
2469 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2471 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2472 VECTOR_CST_ELT (arg1, i), flags))
2473 return 0;
2475 return 1;
2478 case COMPLEX_CST:
2479 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2480 flags)
2481 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2482 flags));
2484 case STRING_CST:
2485 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2486 && ! memcmp (TREE_STRING_POINTER (arg0),
2487 TREE_STRING_POINTER (arg1),
2488 TREE_STRING_LENGTH (arg0)));
2490 case ADDR_EXPR:
2491 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2492 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2493 ? OEP_CONSTANT_ADDRESS_OF : 0);
2494 default:
2495 break;
2498 if (flags & OEP_ONLY_CONST)
2499 return 0;
2501 /* Define macros to test an operand from arg0 and arg1 for equality and a
2502 variant that allows null and views null as being different from any
2503 non-null value. In the latter case, if either is null, they both
2504 must be; otherwise, do the normal comparison. */
2505 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2506 TREE_OPERAND (arg1, N), flags)
2508 #define OP_SAME_WITH_NULL(N) \
2509 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2510 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2512 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2514 case tcc_unary:
2515 /* Two conversions are equal only if signedness and modes match. */
2516 switch (TREE_CODE (arg0))
2518 CASE_CONVERT:
2519 case FIX_TRUNC_EXPR:
2520 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2521 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2522 return 0;
2523 break;
2524 default:
2525 break;
2528 return OP_SAME (0);
2531 case tcc_comparison:
2532 case tcc_binary:
2533 if (OP_SAME (0) && OP_SAME (1))
2534 return 1;
2536 /* For commutative ops, allow the other order. */
2537 return (commutative_tree_code (TREE_CODE (arg0))
2538 && operand_equal_p (TREE_OPERAND (arg0, 0),
2539 TREE_OPERAND (arg1, 1), flags)
2540 && operand_equal_p (TREE_OPERAND (arg0, 1),
2541 TREE_OPERAND (arg1, 0), flags));
2543 case tcc_reference:
2544 /* If either of the pointer (or reference) expressions we are
2545 dereferencing contain a side effect, these cannot be equal,
2546 but their addresses can be. */
2547 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2548 && (TREE_SIDE_EFFECTS (arg0)
2549 || TREE_SIDE_EFFECTS (arg1)))
2550 return 0;
2552 switch (TREE_CODE (arg0))
2554 case INDIRECT_REF:
2555 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2556 return OP_SAME (0);
2558 case REALPART_EXPR:
2559 case IMAGPART_EXPR:
2560 return OP_SAME (0);
2562 case TARGET_MEM_REF:
2563 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2564 /* Require equal extra operands and then fall through to MEM_REF
2565 handling of the two common operands. */
2566 if (!OP_SAME_WITH_NULL (2)
2567 || !OP_SAME_WITH_NULL (3)
2568 || !OP_SAME_WITH_NULL (4))
2569 return 0;
2570 /* Fallthru. */
2571 case MEM_REF:
2572 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2573 /* Require equal access sizes, and similar pointer types.
2574 We can have incomplete types for array references of
2575 variable-sized arrays from the Fortran frontend
2576 though. */
2577 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2578 || (TYPE_SIZE (TREE_TYPE (arg0))
2579 && TYPE_SIZE (TREE_TYPE (arg1))
2580 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2581 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2582 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2583 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2584 && OP_SAME (0) && OP_SAME (1));
2586 case ARRAY_REF:
2587 case ARRAY_RANGE_REF:
2588 /* Operands 2 and 3 may be null.
2589 Compare the array index by value if it is constant first as we
2590 may have different types but same value here. */
2591 if (!OP_SAME (0))
2592 return 0;
2593 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2594 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2595 TREE_OPERAND (arg1, 1))
2596 || OP_SAME (1))
2597 && OP_SAME_WITH_NULL (2)
2598 && OP_SAME_WITH_NULL (3));
2600 case COMPONENT_REF:
2601 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2602 may be NULL when we're called to compare MEM_EXPRs. */
2603 if (!OP_SAME_WITH_NULL (0))
2604 return 0;
2605 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2606 return OP_SAME (1) && OP_SAME_WITH_NULL (2);
2608 case BIT_FIELD_REF:
2609 if (!OP_SAME (0))
2610 return 0;
2611 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2612 return OP_SAME (1) && OP_SAME (2);
2614 default:
2615 return 0;
2618 case tcc_expression:
2619 switch (TREE_CODE (arg0))
2621 case ADDR_EXPR:
2622 case TRUTH_NOT_EXPR:
2623 return OP_SAME (0);
2625 case TRUTH_ANDIF_EXPR:
2626 case TRUTH_ORIF_EXPR:
2627 return OP_SAME (0) && OP_SAME (1);
2629 case FMA_EXPR:
2630 case WIDEN_MULT_PLUS_EXPR:
2631 case WIDEN_MULT_MINUS_EXPR:
2632 if (!OP_SAME (2))
2633 return 0;
2634 /* The multiplication operands are commutative. */
2635 /* FALLTHRU */
2637 case TRUTH_AND_EXPR:
2638 case TRUTH_OR_EXPR:
2639 case TRUTH_XOR_EXPR:
2640 if (OP_SAME (0) && OP_SAME (1))
2641 return 1;
2643 /* Otherwise take into account this is a commutative operation. */
2644 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2645 TREE_OPERAND (arg1, 1), flags)
2646 && operand_equal_p (TREE_OPERAND (arg0, 1),
2647 TREE_OPERAND (arg1, 0), flags));
2649 case COND_EXPR:
2650 case VEC_COND_EXPR:
2651 case DOT_PROD_EXPR:
2652 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2654 default:
2655 return 0;
2658 case tcc_vl_exp:
2659 switch (TREE_CODE (arg0))
2661 case CALL_EXPR:
2662 /* If the CALL_EXPRs call different functions, then they
2663 clearly can not be equal. */
2664 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2665 flags))
2666 return 0;
2669 unsigned int cef = call_expr_flags (arg0);
2670 if (flags & OEP_PURE_SAME)
2671 cef &= ECF_CONST | ECF_PURE;
2672 else
2673 cef &= ECF_CONST;
2674 if (!cef)
2675 return 0;
2678 /* Now see if all the arguments are the same. */
2680 const_call_expr_arg_iterator iter0, iter1;
2681 const_tree a0, a1;
2682 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2683 a1 = first_const_call_expr_arg (arg1, &iter1);
2684 a0 && a1;
2685 a0 = next_const_call_expr_arg (&iter0),
2686 a1 = next_const_call_expr_arg (&iter1))
2687 if (! operand_equal_p (a0, a1, flags))
2688 return 0;
2690 /* If we get here and both argument lists are exhausted
2691 then the CALL_EXPRs are equal. */
2692 return ! (a0 || a1);
2694 default:
2695 return 0;
2698 case tcc_declaration:
2699 /* Consider __builtin_sqrt equal to sqrt. */
2700 return (TREE_CODE (arg0) == FUNCTION_DECL
2701 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2702 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2703 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2705 default:
2706 return 0;
2709 #undef OP_SAME
2710 #undef OP_SAME_WITH_NULL
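/* Illustrative sketch (added, not part of the original file): the REAL_CST
   case above asks "indistinguishable?", not "compares equal?".  Under IEEE
   arithmetic -0.0 == 0.0 is true, yet copysign can tell the two constants
   apart, so when signed zeros are honored they must not be folded into one
   operand.  Assuming IEEE semantics on the host:  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  assert (-0.0 == 0.0);			/* equal under ==...	     */
  assert (copysign (1.0, -0.0) == -1.0);	/* ...but distinguishable.   */
  assert (copysign (1.0, 0.0) == 1.0);
  return 0;
}
#endif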
2713 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2714 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2716 When in doubt, return 0. */
2718 static int
2719 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2721 int unsignedp1, unsignedpo;
2722 tree primarg0, primarg1, primother;
2723 unsigned int correct_width;
2725 if (operand_equal_p (arg0, arg1, 0))
2726 return 1;
2728 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2729 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2730 return 0;
2732 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2733 and see if the inner values are the same. This removes any
2734 signedness comparison, which doesn't matter here. */
2735 primarg0 = arg0, primarg1 = arg1;
2736 STRIP_NOPS (primarg0);
2737 STRIP_NOPS (primarg1);
2738 if (operand_equal_p (primarg0, primarg1, 0))
2739 return 1;
2741 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2742 actual comparison operand, ARG0.
2744 First throw away any conversions to wider types
2745 already present in the operands. */
2747 primarg1 = get_narrower (arg1, &unsignedp1);
2748 primother = get_narrower (other, &unsignedpo);
2750 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2751 if (unsignedp1 == unsignedpo
2752 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2753 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2755 tree type = TREE_TYPE (arg0);
2757 /* Make sure shorter operand is extended the right way
2758 to match the longer operand. */
2759 primarg1 = fold_convert (signed_or_unsigned_type_for
2760 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2762 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2763 return 1;
2766 return 0;
2769 /* See if ARG is an expression that is either a comparison or is performing
2770 arithmetic on comparisons. The comparisons must only be comparing
2771 two different values, which will be stored in *CVAL1 and *CVAL2; if
2772 they are nonzero it means that some operands have already been found.
2773 No variables may be used anywhere else in the expression except in the
2774 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2775 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2777 If this is true, return 1. Otherwise, return zero. */
2779 static int
2780 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2782 enum tree_code code = TREE_CODE (arg);
2783 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2785 /* We can handle some of the tcc_expression cases here. */
2786 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2787 tclass = tcc_unary;
2788 else if (tclass == tcc_expression
2789 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2790 || code == COMPOUND_EXPR))
2791 tclass = tcc_binary;
2793 else if (tclass == tcc_expression && code == SAVE_EXPR
2794 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2796 /* If we've already found a CVAL1 or CVAL2, this expression is
2797 too complex to handle. */
2798 if (*cval1 || *cval2)
2799 return 0;
2801 tclass = tcc_unary;
2802 *save_p = 1;
2805 switch (tclass)
2807 case tcc_unary:
2808 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2810 case tcc_binary:
2811 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2812 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2813 cval1, cval2, save_p));
2815 case tcc_constant:
2816 return 1;
2818 case tcc_expression:
2819 if (code == COND_EXPR)
2820 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2821 cval1, cval2, save_p)
2822 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2823 cval1, cval2, save_p)
2824 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2825 cval1, cval2, save_p));
2826 return 0;
2828 case tcc_comparison:
2829 /* First see if we can handle the first operand, then the second. For
2830 the second operand, we know *CVAL1 can't be zero. It must be that
2831 one side of the comparison is each of the values; test for the
2832 case where this isn't true by failing if the two operands
2833 are the same. */
2835 if (operand_equal_p (TREE_OPERAND (arg, 0),
2836 TREE_OPERAND (arg, 1), 0))
2837 return 0;
2839 if (*cval1 == 0)
2840 *cval1 = TREE_OPERAND (arg, 0);
2841 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2842 ;
2843 else if (*cval2 == 0)
2844 *cval2 = TREE_OPERAND (arg, 0);
2845 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2846 ;
2847 else
2848 return 0;
2850 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2851 ;
2852 else if (*cval2 == 0)
2853 *cval2 = TREE_OPERAND (arg, 1);
2854 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2855 ;
2856 else
2857 return 0;
2859 return 1;
2861 default:
2862 return 0;
2866 /* ARG is a tree that is known to contain just arithmetic operations and
2867 comparisons. Evaluate the operations in the tree substituting NEW0 for
2868 any occurrence of OLD0 as an operand of a comparison and likewise for
2869 NEW1 and OLD1. */
2871 static tree
2872 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2873 tree old1, tree new1)
2875 tree type = TREE_TYPE (arg);
2876 enum tree_code code = TREE_CODE (arg);
2877 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2879 /* We can handle some of the tcc_expression cases here. */
2880 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2881 tclass = tcc_unary;
2882 else if (tclass == tcc_expression
2883 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2884 tclass = tcc_binary;
2886 switch (tclass)
2888 case tcc_unary:
2889 return fold_build1_loc (loc, code, type,
2890 eval_subst (loc, TREE_OPERAND (arg, 0),
2891 old0, new0, old1, new1));
2893 case tcc_binary:
2894 return fold_build2_loc (loc, code, type,
2895 eval_subst (loc, TREE_OPERAND (arg, 0),
2896 old0, new0, old1, new1),
2897 eval_subst (loc, TREE_OPERAND (arg, 1),
2898 old0, new0, old1, new1));
2900 case tcc_expression:
2901 switch (code)
2903 case SAVE_EXPR:
2904 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2905 old1, new1);
2907 case COMPOUND_EXPR:
2908 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2909 old1, new1);
2911 case COND_EXPR:
2912 return fold_build3_loc (loc, code, type,
2913 eval_subst (loc, TREE_OPERAND (arg, 0),
2914 old0, new0, old1, new1),
2915 eval_subst (loc, TREE_OPERAND (arg, 1),
2916 old0, new0, old1, new1),
2917 eval_subst (loc, TREE_OPERAND (arg, 2),
2918 old0, new0, old1, new1));
2919 default:
2920 break;
2922 /* Fall through - ??? */
2924 case tcc_comparison:
2926 tree arg0 = TREE_OPERAND (arg, 0);
2927 tree arg1 = TREE_OPERAND (arg, 1);
2929 /* We need to check both for exact equality and tree equality. The
2930 former will be true if the operand has a side-effect. In that
2931 case, we know the operand occurred exactly once. */
2933 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2934 arg0 = new0;
2935 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2936 arg0 = new1;
2938 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2939 arg1 = new0;
2940 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2941 arg1 = new1;
2943 return fold_build2_loc (loc, code, type, arg0, arg1);
2946 default:
2947 return arg;
2951 /* Return a tree for the case when the result of an expression is RESULT
2952 converted to TYPE and OMITTED was previously an operand of the expression
2953 but is now not needed (e.g., we folded OMITTED * 0).
2955 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2956 the conversion of RESULT to TYPE. */
2958 tree
2959 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2961 tree t = fold_convert_loc (loc, type, result);
2963 /* If the resulting operand is an empty statement, just return the omitted
2964 statement cast to void. */
2965 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2966 return build1_loc (loc, NOP_EXPR, void_type_node,
2967 fold_ignored_result (omitted));
2969 if (TREE_SIDE_EFFECTS (omitted))
2970 return build2_loc (loc, COMPOUND_EXPR, type,
2971 fold_ignored_result (omitted), t);
2973 return non_lvalue_loc (loc, t);
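/* Illustrative sketch (added, not part of the original file): why the
   omitted operand may still need evaluating.  Folding f () * 0 down to
   plain 0 must not lose the call; the COMPOUND_EXPR built above
   corresponds to the C expression (f (), 0).  The helper f is
   hypothetical:  */
#if 0
#include <assert.h>

static int calls;

static int
f (void)
{
  calls++;
  return 7;
}

int
main (void)
{
  int r = (f (), 0);		/* side effect kept, value dropped */
  assert (r == 0 && calls == 1);
  return 0;
}
#endif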
2976 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2978 static tree
2979 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2980 tree omitted)
2982 tree t = fold_convert_loc (loc, type, result);
2984 /* If the resulting operand is an empty statement, just return the omitted
2985 statement cast to void. */
2986 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2987 return build1_loc (loc, NOP_EXPR, void_type_node,
2988 fold_ignored_result (omitted));
2990 if (TREE_SIDE_EFFECTS (omitted))
2991 return build2_loc (loc, COMPOUND_EXPR, type,
2992 fold_ignored_result (omitted), t);
2994 return pedantic_non_lvalue_loc (loc, t);
2997 /* Return a tree for the case when the result of an expression is RESULT
2998 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2999 of the expression but are now not needed.
3001 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3002 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3003 evaluated before OMITTED2. Otherwise, if neither has side effects,
3004 just do the conversion of RESULT to TYPE. */
3006 tree
3007 omit_two_operands_loc (location_t loc, tree type, tree result,
3008 tree omitted1, tree omitted2)
3010 tree t = fold_convert_loc (loc, type, result);
3012 if (TREE_SIDE_EFFECTS (omitted2))
3013 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3014 if (TREE_SIDE_EFFECTS (omitted1))
3015 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3017 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3021 /* Return a simplified tree node for the truth-negation of ARG. This
3022 never alters ARG itself. We assume that ARG is an operation that
3023 returns a truth value (0 or 1).
3025 FIXME: one would think we would fold the result, but it causes
3026 problems with the dominator optimizer. */
3028 tree
3029 fold_truth_not_expr (location_t loc, tree arg)
3031 tree type = TREE_TYPE (arg);
3032 enum tree_code code = TREE_CODE (arg);
3033 location_t loc1, loc2;
3035 /* If this is a comparison, we can simply invert it, except for
3036 floating-point non-equality comparisons, in which case we just
3037 enclose a TRUTH_NOT_EXPR around what we have. */
3039 if (TREE_CODE_CLASS (code) == tcc_comparison)
3041 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3042 if (FLOAT_TYPE_P (op_type)
3043 && flag_trapping_math
3044 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3045 && code != NE_EXPR && code != EQ_EXPR)
3046 return NULL_TREE;
3048 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3049 if (code == ERROR_MARK)
3050 return NULL_TREE;
3052 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3053 TREE_OPERAND (arg, 1));
3056 switch (code)
3058 case INTEGER_CST:
3059 return constant_boolean_node (integer_zerop (arg), type);
3061 case TRUTH_AND_EXPR:
3062 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3063 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3064 return build2_loc (loc, TRUTH_OR_EXPR, type,
3065 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3066 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3068 case TRUTH_OR_EXPR:
3069 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3070 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3071 return build2_loc (loc, TRUTH_AND_EXPR, type,
3072 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3073 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3075 case TRUTH_XOR_EXPR:
3076 /* Here we can invert either operand. We invert the first operand
3077 unless the second operand is a TRUTH_NOT_EXPR in which case our
3078 result is the XOR of the first operand with the inside of the
3079 negation of the second operand. */
3081 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3082 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3083 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3084 else
3085 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3086 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3087 TREE_OPERAND (arg, 1));
3089 case TRUTH_ANDIF_EXPR:
3090 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3091 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3092 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3093 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3094 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3096 case TRUTH_ORIF_EXPR:
3097 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3098 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3099 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3100 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3101 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3103 case TRUTH_NOT_EXPR:
3104 return TREE_OPERAND (arg, 0);
3106 case COND_EXPR:
3108 tree arg1 = TREE_OPERAND (arg, 1);
3109 tree arg2 = TREE_OPERAND (arg, 2);
3111 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3112 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3114 /* A COND_EXPR may have a throw as one operand, which
3115 then has void type. Just leave void operands
3116 as they are. */
3117 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3118 VOID_TYPE_P (TREE_TYPE (arg1))
3119 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3120 VOID_TYPE_P (TREE_TYPE (arg2))
3121 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3124 case COMPOUND_EXPR:
3125 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3126 return build2_loc (loc, COMPOUND_EXPR, type,
3127 TREE_OPERAND (arg, 0),
3128 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3130 case NON_LVALUE_EXPR:
3131 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3132 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3134 CASE_CONVERT:
3135 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3136 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3138 /* ... fall through ... */
3140 case FLOAT_EXPR:
3141 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3142 return build1_loc (loc, TREE_CODE (arg), type,
3143 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3145 case BIT_AND_EXPR:
3146 if (!integer_onep (TREE_OPERAND (arg, 1)))
3147 return NULL_TREE;
3148 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3150 case SAVE_EXPR:
3151 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3153 case CLEANUP_POINT_EXPR:
3154 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3155 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3156 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3158 default:
3159 return NULL_TREE;
3163 /* Return a simplified tree node for the truth-negation of ARG. This
3164 never alters ARG itself. We assume that ARG is an operation that
3165 returns a truth value (0 or 1).
3167 FIXME: one would think we would fold the result, but it causes
3168 problems with the dominator optimizer. */
3170 tree
3171 invert_truthvalue_loc (location_t loc, tree arg)
3173 tree tem;
3175 if (TREE_CODE (arg) == ERROR_MARK)
3176 return arg;
3178 tem = fold_truth_not_expr (loc, arg);
3179 if (!tem)
3180 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3182 return tem;
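/* Illustrative sketch (added, not part of the original file): the
   TRUTH_AND_EXPR/TRUTH_OR_EXPR cases of fold_truth_not_expr above are
   De Morgan's laws, checked here over all truth-value inputs:  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      {
	assert (!(a && b) == (!a || !b));	/* NOT over AND */
	assert (!(a || b) == (!a && !b));	/* NOT over OR  */
      }
  return 0;
}
#endif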
3185 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3186 operands are another bit-wise operation with a common input. If so,
3187 distribute the bit operations to save an operation and possibly two if
3188 constants are involved. For example, convert
3189 (A | B) & (A | C) into A | (B & C)
3190 Further simplification will occur if B and C are constants.
3192 If this optimization cannot be done, 0 will be returned. */
3194 static tree
3195 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3196 tree arg0, tree arg1)
3198 tree common;
3199 tree left, right;
3201 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3202 || TREE_CODE (arg0) == code
3203 || (TREE_CODE (arg0) != BIT_AND_EXPR
3204 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3205 return 0;
3207 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3209 common = TREE_OPERAND (arg0, 0);
3210 left = TREE_OPERAND (arg0, 1);
3211 right = TREE_OPERAND (arg1, 1);
3213 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3215 common = TREE_OPERAND (arg0, 0);
3216 left = TREE_OPERAND (arg0, 1);
3217 right = TREE_OPERAND (arg1, 0);
3219 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3221 common = TREE_OPERAND (arg0, 1);
3222 left = TREE_OPERAND (arg0, 0);
3223 right = TREE_OPERAND (arg1, 1);
3225 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3227 common = TREE_OPERAND (arg0, 1);
3228 left = TREE_OPERAND (arg0, 0);
3229 right = TREE_OPERAND (arg1, 0);
3231 else
3232 return 0;
3234 common = fold_convert_loc (loc, type, common);
3235 left = fold_convert_loc (loc, type, left);
3236 right = fold_convert_loc (loc, type, right);
3237 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3238 fold_build2_loc (loc, code, type, left, right));
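/* Illustrative sketch (added, not part of the original file): the
   distribution performed above, (A | B) & (A | C) == A | (B & C), holds
   bitwise and trades two ORs plus an AND for one of each:  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned a = 0x0f, b = 0x30, c = 0x50;
  assert (((a | b) & (a | c)) == (a | (b & c)));	/* 0x1f both ways */
  return 0;
}
#endif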
3241 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3242 with code CODE. This optimization is unsafe. */
3243 static tree
3244 distribute_real_division (location_t loc, enum tree_code code, tree type,
3245 tree arg0, tree arg1)
3247 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3248 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3250 /* (A / C) +- (B / C) -> (A +- B) / C. */
3251 if (mul0 == mul1
3252 && operand_equal_p (TREE_OPERAND (arg0, 1),
3253 TREE_OPERAND (arg1, 1), 0))
3254 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3255 fold_build2_loc (loc, code, type,
3256 TREE_OPERAND (arg0, 0),
3257 TREE_OPERAND (arg1, 0)),
3258 TREE_OPERAND (arg0, 1));
3260 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3261 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3262 TREE_OPERAND (arg1, 0), 0)
3263 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3264 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3266 REAL_VALUE_TYPE r0, r1;
3267 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3268 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3269 if (!mul0)
3270 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3271 if (!mul1)
3272 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3273 real_arithmetic (&r0, code, &r0, &r1);
3274 return fold_build2_loc (loc, MULT_EXPR, type,
3275 TREE_OPERAND (arg0, 0),
3276 build_real (type, r0));
3279 return NULL_TREE;
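/* Illustrative sketch (added, not part of the original file): why the
   transformation above is flagged unsafe for floating point.  The
   rewritten form (A + B) / C can overflow in the intermediate sum where
   A / C + B / C does not.  Assuming IEEE doubles on the host:  */
#if 0
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  volatile double a = DBL_MAX, b = DBL_MAX, c = 2.0;
  assert (a / c + b / c == DBL_MAX);	/* exact: dividing by 2 is exact */
  assert (isinf ((a + b) / c));		/* a + b already overflows	 */
  return 0;
}
#endif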
3282 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3283 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3285 static tree
3286 make_bit_field_ref (location_t loc, tree inner, tree type,
3287 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3289 tree result, bftype;
3291 if (bitpos == 0)
3293 tree size = TYPE_SIZE (TREE_TYPE (inner));
3294 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3295 || POINTER_TYPE_P (TREE_TYPE (inner)))
3296 && host_integerp (size, 0)
3297 && tree_low_cst (size, 0) == bitsize)
3298 return fold_convert_loc (loc, type, inner);
3301 bftype = type;
3302 if (TYPE_PRECISION (bftype) != bitsize
3303 || TYPE_UNSIGNED (bftype) == !unsignedp)
3304 bftype = build_nonstandard_integer_type (bitsize, 0);
3306 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3307 size_int (bitsize), bitsize_int (bitpos));
3309 if (bftype != type)
3310 result = fold_convert_loc (loc, type, result);
3312 return result;
3315 /* Optimize a bit-field compare.
3317 There are two cases: First is a compare against a constant and the
3318 second is a comparison of two items where the fields are at the same
3319 bit position relative to the start of a chunk (byte, halfword, word)
3320 large enough to contain it. In these cases we can avoid the shift
3321 implicit in bitfield extractions.
3323 For constants, we emit a compare of the shifted constant with the
3324 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3325 compared. For two fields at the same position, we do the ANDs with the
3326 similar mask and compare the result of the ANDs.
3328 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3329 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3330 are the left and right operands of the comparison, respectively.
3332 If the optimization described above can be done, we return the resulting
3333 tree. Otherwise we return zero. */
3335 static tree
3336 optimize_bit_field_compare (location_t loc, enum tree_code code,
3337 tree compare_type, tree lhs, tree rhs)
3339 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3340 tree type = TREE_TYPE (lhs);
3341 tree signed_type, unsigned_type;
3342 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3343 enum machine_mode lmode, rmode, nmode;
3344 int lunsignedp, runsignedp;
3345 int lvolatilep = 0, rvolatilep = 0;
3346 tree linner, rinner = NULL_TREE;
3347 tree mask;
3348 tree offset;
3350 /* In the strict volatile bitfields case, doing code changes here may prevent
3351 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3352 if (flag_strict_volatile_bitfields > 0)
3353 return 0;
3355 /* Get all the information about the extractions being done. If the bit size
3356 is the same as the size of the underlying object, we aren't doing an
3357 extraction at all and so can do nothing. We also don't want to
3358 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3359 then will no longer be able to replace it. */
3360 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3361 &lunsignedp, &lvolatilep, false);
3362 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3363 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3364 return 0;
3366 if (!const_p)
3368 /* If this is not a constant, we can only do something if bit positions,
3369 sizes, and signedness are the same. */
3370 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3371 &runsignedp, &rvolatilep, false);
3373 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3374 || lunsignedp != runsignedp || offset != 0
3375 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3376 return 0;
3379 /* See if we can find a mode to refer to this field. We should be able to,
3380 but fail if we can't. */
3381 if (lvolatilep
3382 && GET_MODE_BITSIZE (lmode) > 0
3383 && flag_strict_volatile_bitfields > 0)
3384 nmode = lmode;
3385 else
3386 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3387 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3388 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3389 TYPE_ALIGN (TREE_TYPE (rinner))),
3390 word_mode, lvolatilep || rvolatilep);
3391 if (nmode == VOIDmode)
3392 return 0;
3394 /* Set signed and unsigned types of the precision of this mode for the
3395 shifts below. */
3396 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3397 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3399 /* Compute the bit position and size for the new reference and our offset
3400 within it. If the new reference is the same size as the original, we
3401 won't optimize anything, so return zero. */
3402 nbitsize = GET_MODE_BITSIZE (nmode);
3403 nbitpos = lbitpos & ~ (nbitsize - 1);
3404 lbitpos -= nbitpos;
3405 if (nbitsize == lbitsize)
3406 return 0;
3408 if (BYTES_BIG_ENDIAN)
3409 lbitpos = nbitsize - lbitsize - lbitpos;
3411 /* Make the mask to be used against the extracted field. */
3412 mask = build_int_cst_type (unsigned_type, -1);
3413 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3414 mask = const_binop (RSHIFT_EXPR, mask,
3415 size_int (nbitsize - lbitsize - lbitpos));
3417 if (! const_p)
3418 /* If not comparing with constant, just rework the comparison
3419 and return. */
3420 return fold_build2_loc (loc, code, compare_type,
3421 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3422 make_bit_field_ref (loc, linner,
3423 unsigned_type,
3424 nbitsize, nbitpos,
3425 1),
3426 mask),
3427 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3428 make_bit_field_ref (loc, rinner,
3429 unsigned_type,
3430 nbitsize, nbitpos,
3431 1),
3432 mask));
3434 /* Otherwise, we are handling the constant case. See if the constant is too
3435 big for the field. Warn and return a tree for 0 (false) if so. We do
3436 this not only for its own sake, but to avoid having to test for this
3437 error case below. If we didn't, we might generate wrong code.
3439 For unsigned fields, the constant shifted right by the field length should
3440 be all zero. For signed fields, the high-order bits should agree with
3441 the sign bit. */
3443 if (lunsignedp)
3445 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3446 fold_convert_loc (loc,
3447 unsigned_type, rhs),
3448 size_int (lbitsize))))
3450 warning (0, "comparison is always %d due to width of bit-field",
3451 code == NE_EXPR);
3452 return constant_boolean_node (code == NE_EXPR, compare_type);
3455 else
3457 tree tem = const_binop (RSHIFT_EXPR,
3458 fold_convert_loc (loc, signed_type, rhs),
3459 size_int (lbitsize - 1));
3460 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3462 warning (0, "comparison is always %d due to width of bit-field",
3463 code == NE_EXPR);
3464 return constant_boolean_node (code == NE_EXPR, compare_type);
3468 /* Single-bit compares should always be against zero. */
3469 if (lbitsize == 1 && ! integer_zerop (rhs))
3471 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3472 rhs = build_int_cst (type, 0);
3475 /* Make a new bitfield reference, shift the constant over the
3476 appropriate number of bits and mask it with the computed mask
3477 (in case this was a signed field). If we changed it, make a new one. */
3478 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3479 if (lvolatilep)
3481 TREE_SIDE_EFFECTS (lhs) = 1;
3482 TREE_THIS_VOLATILE (lhs) = 1;
3485 rhs = const_binop (BIT_AND_EXPR,
3486 const_binop (LSHIFT_EXPR,
3487 fold_convert_loc (loc, unsigned_type, rhs),
3488 size_int (lbitpos)),
3489 mask);
3491 lhs = build2_loc (loc, code, compare_type,
3492 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3493 return lhs;
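/* Illustrative sketch (added, not part of the original file): the shape of
   the constant-case rewrite above, shown on a hypothetical 3-bit field at
   bit 2 of a word.  Instead of shifting the field down to compare it with
   5, shift the constant up once and mask:  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned word = 0x17;			/* field value: (0x17 >> 2) & 7 == 5 */
  unsigned mask = 7u << 2;
  assert (((word >> 2) & 7) == 5);	/* extraction form (shift per test)  */
  assert ((word & mask) == (5u << 2));	/* shift-free form built above	     */
  return 0;
}
#endif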
3496 /* Subroutine for fold_truth_andor_1: decode a field reference.
3498 If EXP is a comparison reference, we return the innermost reference.
3500 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3501 set to the starting bit number.
3503 If the innermost field can be completely contained in a mode-sized
3504 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3506 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3507 otherwise it is not changed.
3509 *PUNSIGNEDP is set to the signedness of the field.
3511 *PMASK is set to the mask used. This is either contained in a
3512 BIT_AND_EXPR or derived from the width of the field.
3514 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3516 Return 0 if this is not a component reference or is one that we can't
3517 do anything with. */
3519 static tree
3520 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3521 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3522 int *punsignedp, int *pvolatilep,
3523 tree *pmask, tree *pand_mask)
3525 tree outer_type = 0;
3526 tree and_mask = 0;
3527 tree mask, inner, offset;
3528 tree unsigned_type;
3529 unsigned int precision;
3531 /* All the optimizations using this function assume integer fields.
3532 There are problems with FP fields since the type_for_size call
3533 below can fail for, e.g., XFmode. */
3534 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3535 return 0;
3537 /* We are interested in the bare arrangement of bits, so strip everything
3538 that doesn't affect the machine mode. However, record the type of the
3539 outermost expression if it may matter below. */
3540 if (CONVERT_EXPR_P (exp)
3541 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3542 outer_type = TREE_TYPE (exp);
3543 STRIP_NOPS (exp);
3545 if (TREE_CODE (exp) == BIT_AND_EXPR)
3547 and_mask = TREE_OPERAND (exp, 1);
3548 exp = TREE_OPERAND (exp, 0);
3549 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3550 if (TREE_CODE (and_mask) != INTEGER_CST)
3551 return 0;
3554 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3555 punsignedp, pvolatilep, false);
3556 if ((inner == exp && and_mask == 0)
3557 || *pbitsize < 0 || offset != 0
3558 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3559 return 0;
3561 /* If the number of bits in the reference is the same as the bitsize of
3562 the outer type, then the outer type gives the signedness. Otherwise
3563 (in case of a small bitfield) the signedness is unchanged. */
3564 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3565 *punsignedp = TYPE_UNSIGNED (outer_type);
3567 /* Compute the mask to access the bitfield. */
3568 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3569 precision = TYPE_PRECISION (unsigned_type);
3571 mask = build_int_cst_type (unsigned_type, -1);
3573 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3574 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3576 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3577 if (and_mask != 0)
3578 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3579 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3581 *pmask = mask;
3582 *pand_mask = and_mask;
3583 return inner;
3586 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3587 bit positions. */
3589 static int
3590 all_ones_mask_p (const_tree mask, int size)
3592 tree type = TREE_TYPE (mask);
3593 unsigned int precision = TYPE_PRECISION (type);
3594 tree tmask;
3596 tmask = build_int_cst_type (signed_type_for (type), -1);
3598 return
3599 tree_int_cst_equal (mask,
3600 const_binop (RSHIFT_EXPR,
3601 const_binop (LSHIFT_EXPR, tmask,
3602 size_int (precision - size)),
3603 size_int (precision - size)));
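/* Illustrative sketch (added, not part of the original file): the
   shift-left-then-right idiom used both in decode_field_reference and in
   all_ones_mask_p builds (or recognizes) a mask of SIZE low-order ones,
   and handles SIZE == PRECISION gracefully as a shift by zero:  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned precision = 32, size = 5;
  unsigned mask = ~0u;			/* start from all ones	      */
  mask <<= precision - size;		/* keep the top SIZE bits     */
  mask >>= precision - size;		/* move them to the low end   */
  assert (mask == 0x1f);
  return 0;
}
#endif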
3606 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3607 represents the sign bit of EXP's type. If EXP represents a sign
3608 or zero extension, also test VAL against the unextended type.
3609 The return value is the (sub)expression whose sign bit is VAL,
3610 or NULL_TREE otherwise. */
3612 static tree
3613 sign_bit_p (tree exp, const_tree val)
3615 unsigned HOST_WIDE_INT mask_lo, lo;
3616 HOST_WIDE_INT mask_hi, hi;
3617 int width;
3618 tree t;
3620 /* Tree EXP must have an integral type. */
3621 t = TREE_TYPE (exp);
3622 if (! INTEGRAL_TYPE_P (t))
3623 return NULL_TREE;
3625 /* Tree VAL must be an integer constant. */
3626 if (TREE_CODE (val) != INTEGER_CST
3627 || TREE_OVERFLOW (val))
3628 return NULL_TREE;
3630 width = TYPE_PRECISION (t);
3631 if (width > HOST_BITS_PER_WIDE_INT)
3633 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3634 lo = 0;
3636 mask_hi = ((unsigned HOST_WIDE_INT) -1
3637 >> (HOST_BITS_PER_DOUBLE_INT - width));
3638 mask_lo = -1;
3640 else
3642 hi = 0;
3643 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3645 mask_hi = 0;
3646 mask_lo = ((unsigned HOST_WIDE_INT) -1
3647 >> (HOST_BITS_PER_WIDE_INT - width));
3650 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3651 treat VAL as if it were unsigned. */
3652 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3653 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3654 return exp;
3656 /* Handle extension from a narrower type. */
3657 if (TREE_CODE (exp) == NOP_EXPR
3658 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3659 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3661 return NULL_TREE;
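/* Illustrative sketch (added, not part of the original file): the kind of
   fold this predicate enables in its callers, shown for a hypothetical
   32-bit int.  Testing the sign bit with a mask is equivalent to a signed
   comparison against zero:  */
#if 0
#include <assert.h>

int
main (void)
{
  int x = -123;
  assert (((x & 0x80000000u) != 0) == (x < 0));
  x = 123;
  assert (((x & 0x80000000u) != 0) == (x < 0));
  return 0;
}
#endif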
3664 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3665 to be evaluated unconditionally. */
3667 static int
3668 simple_operand_p (const_tree exp)
3670 /* Strip any conversions that don't change the machine mode. */
3671 STRIP_NOPS (exp);
3673 return (CONSTANT_CLASS_P (exp)
3674 || TREE_CODE (exp) == SSA_NAME
3675 || (DECL_P (exp)
3676 && ! TREE_ADDRESSABLE (exp)
3677 && ! TREE_THIS_VOLATILE (exp)
3678 && ! DECL_NONLOCAL (exp)
3679 /* Don't regard global variables as simple. They may be
3680 allocated in ways unknown to the compiler (shared memory,
3681 #pragma weak, etc). */
3682 && ! TREE_PUBLIC (exp)
3683 && ! DECL_EXTERNAL (exp)
3684 /* Loading a static variable is unduly expensive, but global
3685 registers aren't expensive. */
3686 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3689 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3690 to be evaluated unconditionally.
3691 In addition to simple_operand_p, we assume that comparisons, conversions,
3692 and logic-not operations are simple, if their operands are simple, too. */
3694 static bool
3695 simple_operand_p_2 (tree exp)
3697 enum tree_code code;
3699 if (TREE_SIDE_EFFECTS (exp)
3700 || tree_could_trap_p (exp))
3701 return false;
3703 while (CONVERT_EXPR_P (exp))
3704 exp = TREE_OPERAND (exp, 0);
3706 code = TREE_CODE (exp);
3708 if (TREE_CODE_CLASS (code) == tcc_comparison)
3709 return (simple_operand_p (TREE_OPERAND (exp, 0))
3710 && simple_operand_p (TREE_OPERAND (exp, 1)));
3712 if (code == TRUTH_NOT_EXPR)
3713 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3715 return simple_operand_p (exp);
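/* Illustrative sketch (added, not part of the original file): what "simple
   enough to evaluate unconditionally" buys.  When the right operand can
   neither trap nor side-effect, a short-circuit && over truth values can
   become the branchless bitwise form:  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      assert ((a && b) == (a & b));	/* valid only because evaluating
					   b early is harmless.  */
  return 0;
}
#endif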
3719 /* The following functions are subroutines to fold_range_test and allow it to
3720 try to change a logical combination of comparisons into a range test.
3722 For example, both
3723 X == 2 || X == 3 || X == 4 || X == 5
3724 and
3725 X >= 2 && X <= 5
3726 are converted to
3727 (unsigned) (X - 2) <= 3
3729 We describe each set of comparisons as being either inside or outside
3730 a range, using a variable named like IN_P, and then describe the
3731 range with a lower and upper bound. If one of the bounds is omitted,
3732 it represents either the highest or lowest value of the type.
3734 In the comments below, we represent a range by two numbers in brackets
3735 preceded by a "+" to designate being inside that range, or a "-" to
3736 designate being outside that range, so the condition can be inverted by
3737 flipping the prefix. An omitted bound is represented by a "-". For
3738 example, "- [-, 10]" means being outside the range starting at the lowest
3739 possible value and ending at 10, in other words, being greater than 10.
3740 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3741 always false.
3743 We set up things so that the missing bounds are handled in a consistent
3744 manner so neither a missing bound nor "true" and "false" need to be
3745 handled using a special case. */
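/* As a quick illustration of the range-test idea (sketch only, not part
   of the folder itself): the OR chain and the unsigned range test from
   the example above compute the same predicate.  */
static int
in_2_to_5_orig (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}
static int
in_2_to_5_folded (int x)
{
  /* Done in unsigned arithmetic so values below 2 wrap to large ones.  */
  return (unsigned) x - 2u <= 3u;
}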
3747 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3748 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3749 and UPPER1_P are nonzero if the respective argument is an upper bound
3750 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3751 must be specified for a comparison. ARG1 will be converted to ARG0's
3752 type if both are specified. */
3754 static tree
3755 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3756 tree arg1, int upper1_p)
3758 tree tem;
3759 int result;
3760 int sgn0, sgn1;
3762 /* If neither arg represents infinity, do the normal operation.
3763 Else, if not a comparison, return infinity. Else handle the special
3764 comparison rules. Note that most of the cases below won't occur, but
3765 are handled for consistency. */
3767 if (arg0 != 0 && arg1 != 0)
3769 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3770 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3771 STRIP_NOPS (tem);
3772 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3775 if (TREE_CODE_CLASS (code) != tcc_comparison)
3776 return 0;
 3778 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for an upper bound,
 3779 and 0 for neither. In real mathematics, we cannot assume that open-ended
 3780 ranges are the same. But this is computer arithmetic, where numbers are
 3781 finite. We can therefore replace any unbounded bound with a value Z,
 3782 Z being greater than any representable number. This permits
 3783 us to treat unbounded ranges as equal. */
3784 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3785 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3786 switch (code)
3788 case EQ_EXPR:
3789 result = sgn0 == sgn1;
3790 break;
3791 case NE_EXPR:
3792 result = sgn0 != sgn1;
3793 break;
3794 case LT_EXPR:
3795 result = sgn0 < sgn1;
3796 break;
3797 case LE_EXPR:
3798 result = sgn0 <= sgn1;
3799 break;
3800 case GT_EXPR:
3801 result = sgn0 > sgn1;
3802 break;
3803 case GE_EXPR:
3804 result = sgn0 >= sgn1;
3805 break;
3806 default:
3807 gcc_unreachable ();
3810 return constant_boolean_node (result, type);
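/* The bound comparison above amounts to this small sketch (hypothetical
   standalone form): encode a missing lower bound as -1, a missing upper
   bound as +1 and any finite value as 0, then compare the codes; two
   missing bounds on the same side compare equal.  */
static int
bound_sign (int is_finite, int is_upper)
{
  return is_finite ? 0 : (is_upper ? 1 : -1);
}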
3813 /* Helper routine for make_range. Perform one step for it, return
3814 new expression if the loop should continue or NULL_TREE if it should
3815 stop. */
3817 tree
3818 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3819 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3820 bool *strict_overflow_p)
3822 tree arg0_type = TREE_TYPE (arg0);
3823 tree n_low, n_high, low = *p_low, high = *p_high;
3824 int in_p = *p_in_p, n_in_p;
3826 switch (code)
3828 case TRUTH_NOT_EXPR:
3829 /* We can only do something if the range is testing for zero. */
3830 if (low == NULL_TREE || high == NULL_TREE
3831 || ! integer_zerop (low) || ! integer_zerop (high))
3832 return NULL_TREE;
3833 *p_in_p = ! in_p;
3834 return arg0;
3836 case EQ_EXPR: case NE_EXPR:
3837 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3838 /* We can only do something if the range is testing for zero
3839 and if the second operand is an integer constant. Note that
 3840 saying something is "in" the range we make is done by
 3841 complementing IN_P, since the initial range ("not equal to zero")
 3842 is an "out" range; for "out" we leave IN_P alone. */
3843 if (low == NULL_TREE || high == NULL_TREE
3844 || ! integer_zerop (low) || ! integer_zerop (high)
3845 || TREE_CODE (arg1) != INTEGER_CST)
3846 return NULL_TREE;
3848 switch (code)
3850 case NE_EXPR: /* - [c, c] */
3851 low = high = arg1;
3852 break;
3853 case EQ_EXPR: /* + [c, c] */
3854 in_p = ! in_p, low = high = arg1;
3855 break;
3856 case GT_EXPR: /* - [-, c] */
3857 low = 0, high = arg1;
3858 break;
3859 case GE_EXPR: /* + [c, -] */
3860 in_p = ! in_p, low = arg1, high = 0;
3861 break;
3862 case LT_EXPR: /* - [c, -] */
3863 low = arg1, high = 0;
3864 break;
3865 case LE_EXPR: /* + [-, c] */
3866 in_p = ! in_p, low = 0, high = arg1;
3867 break;
3868 default:
3869 gcc_unreachable ();
3872 /* If this is an unsigned comparison, we also know that EXP is
3873 greater than or equal to zero. We base the range tests we make
3874 on that fact, so we record it here so we can parse existing
3875 range tests. We test arg0_type since often the return type
3876 of, e.g. EQ_EXPR, is boolean. */
3877 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3879 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3880 in_p, low, high, 1,
3881 build_int_cst (arg0_type, 0),
3882 NULL_TREE))
3883 return NULL_TREE;
3885 in_p = n_in_p, low = n_low, high = n_high;
3887 /* If the high bound is missing, but we have a nonzero low
3888 bound, reverse the range so it goes from zero to the low bound
3889 minus 1. */
3890 if (high == 0 && low && ! integer_zerop (low))
3892 in_p = ! in_p;
3893 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3894 integer_one_node, 0);
3895 low = build_int_cst (arg0_type, 0);
3899 *p_low = low;
3900 *p_high = high;
3901 *p_in_p = in_p;
3902 return arg0;
3904 case NEGATE_EXPR:
 3905 /* If flag_wrapv is in effect and ARG0_TYPE is signed, make sure
 3906 low and high are non-NULL; the normalize step will then do the right thing. */
3907 if (!TYPE_UNSIGNED (arg0_type)
3908 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3910 if (low == NULL_TREE)
3911 low = TYPE_MIN_VALUE (arg0_type);
3912 if (high == NULL_TREE)
3913 high = TYPE_MAX_VALUE (arg0_type);
3916 /* (-x) IN [a,b] -> x in [-b, -a] */
3917 n_low = range_binop (MINUS_EXPR, exp_type,
3918 build_int_cst (exp_type, 0),
3919 0, high, 1);
3920 n_high = range_binop (MINUS_EXPR, exp_type,
3921 build_int_cst (exp_type, 0),
3922 0, low, 0);
3923 if (n_high != 0 && TREE_OVERFLOW (n_high))
3924 return NULL_TREE;
3925 goto normalize;
3927 case BIT_NOT_EXPR:
3928 /* ~ X -> -X - 1 */
3929 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3930 build_int_cst (exp_type, 1));
3932 case PLUS_EXPR:
3933 case MINUS_EXPR:
3934 if (TREE_CODE (arg1) != INTEGER_CST)
3935 return NULL_TREE;
3937 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3938 move a constant to the other side. */
3939 if (!TYPE_UNSIGNED (arg0_type)
3940 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3941 return NULL_TREE;
3943 /* If EXP is signed, any overflow in the computation is undefined,
3944 so we don't worry about it so long as our computations on
3945 the bounds don't overflow. For unsigned, overflow is defined
3946 and this is exactly the right thing. */
3947 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3948 arg0_type, low, 0, arg1, 0);
3949 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3950 arg0_type, high, 1, arg1, 0);
3951 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3952 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3953 return NULL_TREE;
3955 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3956 *strict_overflow_p = true;
3958 normalize:
3959 /* Check for an unsigned range which has wrapped around the maximum
3960 value thus making n_high < n_low, and normalize it. */
3961 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3963 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3964 integer_one_node, 0);
3965 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3966 integer_one_node, 0);
3968 /* If the range is of the form +/- [ x+1, x ], we won't
3969 be able to normalize it. But then, it represents the
3970 whole range or the empty set, so make it
3971 +/- [ -, - ]. */
3972 if (tree_int_cst_equal (n_low, low)
3973 && tree_int_cst_equal (n_high, high))
3974 low = high = 0;
3975 else
3976 in_p = ! in_p;
3978 else
3979 low = n_low, high = n_high;
3981 *p_low = low;
3982 *p_high = high;
3983 *p_in_p = in_p;
3984 return arg0;
3986 CASE_CONVERT:
3987 case NON_LVALUE_EXPR:
3988 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3989 return NULL_TREE;
3991 if (! INTEGRAL_TYPE_P (arg0_type)
3992 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3993 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3994 return NULL_TREE;
3996 n_low = low, n_high = high;
3998 if (n_low != 0)
3999 n_low = fold_convert_loc (loc, arg0_type, n_low);
4001 if (n_high != 0)
4002 n_high = fold_convert_loc (loc, arg0_type, n_high);
 4004 /* If we're converting arg0 from an unsigned type to the signed
 4005 type of exp, we will be doing the comparison as unsigned.
4006 The tests above have already verified that LOW and HIGH
4007 are both positive.
4009 So we have to ensure that we will handle large unsigned
4010 values the same way that the current signed bounds treat
4011 negative values. */
4013 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4015 tree high_positive;
4016 tree equiv_type;
4017 /* For fixed-point modes, we need to pass the saturating flag
4018 as the 2nd parameter. */
4019 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4020 equiv_type
4021 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4022 TYPE_SATURATING (arg0_type));
4023 else
4024 equiv_type
4025 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4027 /* A range without an upper bound is, naturally, unbounded.
4028 Since convert would have cropped a very large value, use
4029 the max value for the destination type. */
4030 high_positive
4031 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4032 : TYPE_MAX_VALUE (arg0_type);
4034 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4035 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4036 fold_convert_loc (loc, arg0_type,
4037 high_positive),
4038 build_int_cst (arg0_type, 1));
4040 /* If the low bound is specified, "and" the range with the
4041 range for which the original unsigned value will be
4042 positive. */
4043 if (low != 0)
4045 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4046 1, fold_convert_loc (loc, arg0_type,
4047 integer_zero_node),
4048 high_positive))
4049 return NULL_TREE;
4051 in_p = (n_in_p == in_p);
4053 else
4055 /* Otherwise, "or" the range with the range of the input
4056 that will be interpreted as negative. */
4057 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4058 1, fold_convert_loc (loc, arg0_type,
4059 integer_zero_node),
4060 high_positive))
4061 return NULL_TREE;
4063 in_p = (in_p != n_in_p);
4067 *p_low = n_low;
4068 *p_high = n_high;
4069 *p_in_p = in_p;
4070 return arg0;
4072 default:
4073 return NULL_TREE;
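/* Worked example of the PLUS_EXPR/MINUS_EXPR and normalize path above
   (illustrative only): for unsigned char X, the test
   (unsigned char) (X - 250) <= 10 holds for X in [250,255] or [0,4].
   Adding 250 back to the bounds yields the wrapped range [250, 4],
   which the normalize step rewrites as the complement of [5, 249].  */
static int
wrapped_range_orig (unsigned char x)
{
  return (unsigned char) (x - 250) <= 10;
}
static int
wrapped_range_normalized (unsigned char x)
{
  return ! (x >= 5 && x <= 249);
}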
4077 /* Given EXP, a logical expression, set the range it is testing into
4078 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4079 actually being tested. *PLOW and *PHIGH will be made of the same
4080 type as the returned expression. If EXP is not a comparison, we
4081 will most likely not be returning a useful value and range. Set
4082 *STRICT_OVERFLOW_P to true if the return value is only valid
4083 because signed overflow is undefined; otherwise, do not change
4084 *STRICT_OVERFLOW_P. */
4086 tree
4087 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4088 bool *strict_overflow_p)
4090 enum tree_code code;
4091 tree arg0, arg1 = NULL_TREE;
4092 tree exp_type, nexp;
4093 int in_p;
4094 tree low, high;
4095 location_t loc = EXPR_LOCATION (exp);
 4097 /* Start with simply saying "EXP != 0" and then look at the code of EXP
 4098 and see if we can refine the range. Some of the cases below may not
 4099 happen, but it doesn't seem worth worrying about this. We keep
 4100 iterating as long as make_range_step refines the expression; once it
 4101 returns NULL_TREE we stop and use the range built so far. */
4103 in_p = 0;
4104 low = high = build_int_cst (TREE_TYPE (exp), 0);
4106 while (1)
4108 code = TREE_CODE (exp);
4109 exp_type = TREE_TYPE (exp);
4110 arg0 = NULL_TREE;
4112 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4114 if (TREE_OPERAND_LENGTH (exp) > 0)
4115 arg0 = TREE_OPERAND (exp, 0);
4116 if (TREE_CODE_CLASS (code) == tcc_binary
4117 || TREE_CODE_CLASS (code) == tcc_comparison
4118 || (TREE_CODE_CLASS (code) == tcc_expression
4119 && TREE_OPERAND_LENGTH (exp) > 1))
4120 arg1 = TREE_OPERAND (exp, 1);
4122 if (arg0 == NULL_TREE)
4123 break;
4125 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4126 &high, &in_p, strict_overflow_p);
4127 if (nexp == NULL_TREE)
4128 break;
4129 exp = nexp;
4132 /* If EXP is a constant, we can evaluate whether this is true or false. */
4133 if (TREE_CODE (exp) == INTEGER_CST)
4135 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4136 exp, 0, low, 0))
4137 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4138 exp, 1, high, 1)));
4139 low = high = 0;
4140 exp = 0;
4143 *pin_p = in_p, *plow = low, *phigh = high;
4144 return exp;
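/* Worked example of the loop above (illustrative): for the expression
   X + 2 > 5 with signed X, the steps are
     GT_EXPR:   the range becomes - [-, 5]
     PLUS_EXPR: subtract 2 from both bounds, giving - [-, 3]
   so X is returned with *PIN_P = 0 and range [-, 3], i.e. the test is
   "X > 3" (and *STRICT_OVERFLOW_P is set, since moving the constant
   relies on signed overflow being undefined).  */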
4147 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4148 type, TYPE, return an expression to test if EXP is in (or out of, depending
4149 on IN_P) the range. Return 0 if the test couldn't be created. */
4151 tree
4152 build_range_check (location_t loc, tree type, tree exp, int in_p,
4153 tree low, tree high)
4155 tree etype = TREE_TYPE (exp), value;
4157 #ifdef HAVE_canonicalize_funcptr_for_compare
4158 /* Disable this optimization for function pointer expressions
4159 on targets that require function pointer canonicalization. */
4160 if (HAVE_canonicalize_funcptr_for_compare
4161 && TREE_CODE (etype) == POINTER_TYPE
4162 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4163 return NULL_TREE;
4164 #endif
4166 if (! in_p)
4168 value = build_range_check (loc, type, exp, 1, low, high);
4169 if (value != 0)
4170 return invert_truthvalue_loc (loc, value);
4172 return 0;
4175 if (low == 0 && high == 0)
4176 return build_int_cst (type, 1);
4178 if (low == 0)
4179 return fold_build2_loc (loc, LE_EXPR, type, exp,
4180 fold_convert_loc (loc, etype, high));
4182 if (high == 0)
4183 return fold_build2_loc (loc, GE_EXPR, type, exp,
4184 fold_convert_loc (loc, etype, low));
4186 if (operand_equal_p (low, high, 0))
4187 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4188 fold_convert_loc (loc, etype, low));
4190 if (integer_zerop (low))
4192 if (! TYPE_UNSIGNED (etype))
4194 etype = unsigned_type_for (etype);
4195 high = fold_convert_loc (loc, etype, high);
4196 exp = fold_convert_loc (loc, etype, exp);
4198 return build_range_check (loc, type, exp, 1, 0, high);
4201 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4202 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4204 unsigned HOST_WIDE_INT lo;
4205 HOST_WIDE_INT hi;
4206 int prec;
4208 prec = TYPE_PRECISION (etype);
4209 if (prec <= HOST_BITS_PER_WIDE_INT)
4211 hi = 0;
4212 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4214 else
4216 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4217 lo = (unsigned HOST_WIDE_INT) -1;
4220 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4222 if (TYPE_UNSIGNED (etype))
4224 tree signed_etype = signed_type_for (etype);
4225 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4226 etype
4227 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4228 else
4229 etype = signed_etype;
4230 exp = fold_convert_loc (loc, etype, exp);
4232 return fold_build2_loc (loc, GT_EXPR, type, exp,
4233 build_int_cst (etype, 0));
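/* E.g. for unsigned char C, the test C >= 1 && C <= 127 reaches this
   point with low 1 and high 127 (the signed 8-bit maximum) and becomes
   (signed char) C > 0 on the usual two's-complement targets
   (illustrative).  */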
 4237 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
 4238 This requires wrap-around arithmetic for the type of the expression.
 4239 First make sure that arithmetic in this type is valid, then make sure
 4240 that it wraps around. */
4241 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4242 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4243 TYPE_UNSIGNED (etype));
4245 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4247 tree utype, minv, maxv;
4249 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4250 for the type in question, as we rely on this here. */
4251 utype = unsigned_type_for (etype);
4252 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4253 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4254 integer_one_node, 1);
4255 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4257 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4258 minv, 1, maxv, 1)))
4259 etype = utype;
4260 else
4261 return 0;
4264 high = fold_convert_loc (loc, etype, high);
4265 low = fold_convert_loc (loc, etype, low);
4266 exp = fold_convert_loc (loc, etype, exp);
4268 value = const_binop (MINUS_EXPR, high, low);
4271 if (POINTER_TYPE_P (etype))
4273 if (value != 0 && !TREE_OVERFLOW (value))
4275 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4276 return build_range_check (loc, type,
4277 fold_build_pointer_plus_loc (loc, exp, low),
4278 1, build_int_cst (etype, 0), value);
4280 return 0;
4283 if (value != 0 && !TREE_OVERFLOW (value))
4284 return build_range_check (loc, type,
4285 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4286 1, build_int_cst (etype, 0), value);
4288 return 0;
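/* Sketch of the general rewrite above on plain unsigned ints
   (illustration only, assuming LO <= HI): with wrap-around arithmetic,
   LO <= C && C <= HI is equivalent to C - LO <= HI - LO, because
   values of C below LO wrap to values above HI - LO.  */
static int
in_lo_hi_orig (unsigned c, unsigned lo, unsigned hi)
{
  return lo <= c && c <= hi;
}
static int
in_lo_hi_folded (unsigned c, unsigned lo, unsigned hi)
{
  return c - lo <= hi - lo;
}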
4291 /* Return the predecessor of VAL in its type, handling the infinite case. */
4293 static tree
4294 range_predecessor (tree val)
4296 tree type = TREE_TYPE (val);
4298 if (INTEGRAL_TYPE_P (type)
4299 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4300 return 0;
4301 else
4302 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4305 /* Return the successor of VAL in its type, handling the infinite case. */
4307 static tree
4308 range_successor (tree val)
4310 tree type = TREE_TYPE (val);
4312 if (INTEGRAL_TYPE_P (type)
4313 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4314 return 0;
4315 else
4316 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4319 /* Given two ranges, see if we can merge them into one. Return 1 if we
4320 can, 0 if we can't. Set the output range into the specified parameters. */
4322 bool
4323 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4324 tree high0, int in1_p, tree low1, tree high1)
4326 int no_overlap;
4327 int subset;
4328 int temp;
4329 tree tem;
4330 int in_p;
4331 tree low, high;
4332 int lowequal = ((low0 == 0 && low1 == 0)
4333 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4334 low0, 0, low1, 0)));
4335 int highequal = ((high0 == 0 && high1 == 0)
4336 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4337 high0, 1, high1, 1)));
 4339 /* Make range 0 be the range that starts first, or ends last if they
 4340 start at the same value. Swap them if that is not already the case. */
4341 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4342 low0, 0, low1, 0))
4343 || (lowequal
4344 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4345 high1, 1, high0, 1))))
4347 temp = in0_p, in0_p = in1_p, in1_p = temp;
4348 tem = low0, low0 = low1, low1 = tem;
4349 tem = high0, high0 = high1, high1 = tem;
4352 /* Now flag two cases, whether the ranges are disjoint or whether the
4353 second range is totally subsumed in the first. Note that the tests
4354 below are simplified by the ones above. */
4355 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4356 high0, 1, low1, 0));
4357 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4358 high1, 1, high0, 1));
4360 /* We now have four cases, depending on whether we are including or
4361 excluding the two ranges. */
4362 if (in0_p && in1_p)
4364 /* If they don't overlap, the result is false. If the second range
4365 is a subset it is the result. Otherwise, the range is from the start
4366 of the second to the end of the first. */
4367 if (no_overlap)
4368 in_p = 0, low = high = 0;
4369 else if (subset)
4370 in_p = 1, low = low1, high = high1;
4371 else
4372 in_p = 1, low = low1, high = high0;
4375 else if (in0_p && ! in1_p)
4377 /* If they don't overlap, the result is the first range. If they are
4378 equal, the result is false. If the second range is a subset of the
4379 first, and the ranges begin at the same place, we go from just after
4380 the end of the second range to the end of the first. If the second
4381 range is not a subset of the first, or if it is a subset and both
4382 ranges end at the same place, the range starts at the start of the
4383 first range and ends just before the second range.
4384 Otherwise, we can't describe this as a single range. */
4385 if (no_overlap)
4386 in_p = 1, low = low0, high = high0;
4387 else if (lowequal && highequal)
4388 in_p = 0, low = high = 0;
4389 else if (subset && lowequal)
4391 low = range_successor (high1);
4392 high = high0;
4393 in_p = 1;
4394 if (low == 0)
4396 /* We are in the weird situation where high0 > high1 but
4397 high1 has no successor. Punt. */
4398 return 0;
4401 else if (! subset || highequal)
4403 low = low0;
4404 high = range_predecessor (low1);
4405 in_p = 1;
4406 if (high == 0)
4408 /* low0 < low1 but low1 has no predecessor. Punt. */
4409 return 0;
4412 else
4413 return 0;
4416 else if (! in0_p && in1_p)
4418 /* If they don't overlap, the result is the second range. If the second
4419 is a subset of the first, the result is false. Otherwise,
4420 the range starts just after the first range and ends at the
4421 end of the second. */
4422 if (no_overlap)
4423 in_p = 1, low = low1, high = high1;
4424 else if (subset || highequal)
4425 in_p = 0, low = high = 0;
4426 else
4428 low = range_successor (high0);
4429 high = high1;
4430 in_p = 1;
4431 if (low == 0)
4433 /* high1 > high0 but high0 has no successor. Punt. */
4434 return 0;
4439 else
4441 /* The case where we are excluding both ranges. Here the complex case
4442 is if they don't overlap. In that case, the only time we have a
4443 range is if they are adjacent. If the second is a subset of the
4444 first, the result is the first. Otherwise, the range to exclude
4445 starts at the beginning of the first range and ends at the end of the
4446 second. */
4447 if (no_overlap)
4449 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4450 range_successor (high0),
4451 1, low1, 0)))
4452 in_p = 0, low = low0, high = high1;
4453 else
4455 /* Canonicalize - [min, x] into - [-, x]. */
4456 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4457 switch (TREE_CODE (TREE_TYPE (low0)))
4459 case ENUMERAL_TYPE:
4460 if (TYPE_PRECISION (TREE_TYPE (low0))
4461 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4462 break;
4463 /* FALLTHROUGH */
4464 case INTEGER_TYPE:
4465 if (tree_int_cst_equal (low0,
4466 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4467 low0 = 0;
4468 break;
4469 case POINTER_TYPE:
4470 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4471 && integer_zerop (low0))
4472 low0 = 0;
4473 break;
4474 default:
4475 break;
4478 /* Canonicalize - [x, max] into - [x, -]. */
4479 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4480 switch (TREE_CODE (TREE_TYPE (high1)))
4482 case ENUMERAL_TYPE:
4483 if (TYPE_PRECISION (TREE_TYPE (high1))
4484 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4485 break;
4486 /* FALLTHROUGH */
4487 case INTEGER_TYPE:
4488 if (tree_int_cst_equal (high1,
4489 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4490 high1 = 0;
4491 break;
4492 case POINTER_TYPE:
4493 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4494 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4495 high1, 1,
4496 integer_one_node, 1)))
4497 high1 = 0;
4498 break;
4499 default:
4500 break;
 4503 /* The ranges might also be adjacent between the maximum and
 4504 minimum values of the given type. For
 4505 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
 4506 return + [x + 1, y - 1]. */
4507 if (low0 == 0 && high1 == 0)
4509 low = range_successor (high0);
4510 high = range_predecessor (low1);
4511 if (low == 0 || high == 0)
4512 return 0;
4514 in_p = 1;
4516 else
4517 return 0;
4520 else if (subset)
4521 in_p = 0, low = low0, high = high0;
4522 else
4523 in_p = 0, low = low0, high = high1;
4526 *pin_p = in_p, *plow = low, *phigh = high;
4527 return 1;
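/* Sketch of the IN0_P && IN1_P case above on plain int bounds
   (hypothetical helper; assumes finite bounds and that range 0 starts
   first, as arranged at the top of merge_ranges): */
static int
merge_in_in (int low0, int high0, int low1, int high1,
             int *plow, int *phigh)
{
  if (high0 < low1)                 /* No overlap: the result is "false".  */
    return 0;
  *plow = low1;                     /* Otherwise from the second start...  */
  *phigh = high1 < high0 ? high1 : high0;   /* ...to the earlier end.  */
  return 1;
}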
4531 /* Subroutine of fold, looking inside expressions of the form
4532 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4533 of the COND_EXPR. This function is being used also to optimize
4534 A op B ? C : A, by reversing the comparison first.
4536 Return a folded expression whose code is not a COND_EXPR
4537 anymore, or NULL_TREE if no folding opportunity is found. */
4539 static tree
4540 fold_cond_expr_with_comparison (location_t loc, tree type,
4541 tree arg0, tree arg1, tree arg2)
4543 enum tree_code comp_code = TREE_CODE (arg0);
4544 tree arg00 = TREE_OPERAND (arg0, 0);
4545 tree arg01 = TREE_OPERAND (arg0, 1);
4546 tree arg1_type = TREE_TYPE (arg1);
4547 tree tem;
4549 STRIP_NOPS (arg1);
4550 STRIP_NOPS (arg2);
4552 /* If we have A op 0 ? A : -A, consider applying the following
4553 transformations:
4555 A == 0? A : -A same as -A
4556 A != 0? A : -A same as A
4557 A >= 0? A : -A same as abs (A)
4558 A > 0? A : -A same as abs (A)
4559 A <= 0? A : -A same as -abs (A)
4560 A < 0? A : -A same as -abs (A)
4562 None of these transformations work for modes with signed
4563 zeros. If A is +/-0, the first two transformations will
4564 change the sign of the result (from +0 to -0, or vice
4565 versa). The last four will fix the sign of the result,
4566 even though the original expressions could be positive or
4567 negative, depending on the sign of A.
4569 Note that all these transformations are correct if A is
4570 NaN, since the two alternatives (A and -A) are also NaNs. */
4571 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4572 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4573 ? real_zerop (arg01)
4574 : integer_zerop (arg01))
4575 && ((TREE_CODE (arg2) == NEGATE_EXPR
4576 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4577 /* In the case that A is of the form X-Y, '-A' (arg2) may
4578 have already been folded to Y-X, check for that. */
4579 || (TREE_CODE (arg1) == MINUS_EXPR
4580 && TREE_CODE (arg2) == MINUS_EXPR
4581 && operand_equal_p (TREE_OPERAND (arg1, 0),
4582 TREE_OPERAND (arg2, 1), 0)
4583 && operand_equal_p (TREE_OPERAND (arg1, 1),
4584 TREE_OPERAND (arg2, 0), 0))))
4585 switch (comp_code)
4587 case EQ_EXPR:
4588 case UNEQ_EXPR:
4589 tem = fold_convert_loc (loc, arg1_type, arg1);
4590 return pedantic_non_lvalue_loc (loc,
4591 fold_convert_loc (loc, type,
4592 negate_expr (tem)));
4593 case NE_EXPR:
4594 case LTGT_EXPR:
4595 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4596 case UNGE_EXPR:
4597 case UNGT_EXPR:
4598 if (flag_trapping_math)
4599 break;
4600 /* Fall through. */
4601 case GE_EXPR:
4602 case GT_EXPR:
4603 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4604 arg1 = fold_convert_loc (loc, signed_type_for
4605 (TREE_TYPE (arg1)), arg1);
4606 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4607 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4608 case UNLE_EXPR:
4609 case UNLT_EXPR:
4610 if (flag_trapping_math)
4611 break;
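/* Fall through. */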
4612 case LE_EXPR:
4613 case LT_EXPR:
4614 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4615 arg1 = fold_convert_loc (loc, signed_type_for
4616 (TREE_TYPE (arg1)), arg1);
4617 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4618 return negate_expr (fold_convert_loc (loc, type, tem));
4619 default:
4620 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4621 break;
4624 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4625 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4626 both transformations are correct when A is NaN: A != 0
4627 is then true, and A == 0 is false. */
4629 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4630 && integer_zerop (arg01) && integer_zerop (arg2))
4632 if (comp_code == NE_EXPR)
4633 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4634 else if (comp_code == EQ_EXPR)
4635 return build_int_cst (type, 0);
4638 /* Try some transformations of A op B ? A : B.
4640 A == B? A : B same as B
4641 A != B? A : B same as A
4642 A >= B? A : B same as max (A, B)
4643 A > B? A : B same as max (B, A)
4644 A <= B? A : B same as min (A, B)
4645 A < B? A : B same as min (B, A)
4647 As above, these transformations don't work in the presence
4648 of signed zeros. For example, if A and B are zeros of
4649 opposite sign, the first two transformations will change
4650 the sign of the result. In the last four, the original
4651 expressions give different results for (A=+0, B=-0) and
4652 (A=-0, B=+0), but the transformed expressions do not.
4654 The first two transformations are correct if either A or B
4655 is a NaN. In the first transformation, the condition will
4656 be false, and B will indeed be chosen. In the case of the
4657 second transformation, the condition A != B will be true,
4658 and A will be chosen.
4660 The conversions to max() and min() are not correct if B is
4661 a number and A is not. The conditions in the original
4662 expressions will be false, so all four give B. The min()
4663 and max() versions would give a NaN instead. */
4664 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4665 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4666 /* Avoid these transformations if the COND_EXPR may be used
4667 as an lvalue in the C++ front-end. PR c++/19199. */
4668 && (in_gimple_form
4669 || (strcmp (lang_hooks.name, "GNU C++") != 0
4670 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4671 || ! maybe_lvalue_p (arg1)
4672 || ! maybe_lvalue_p (arg2)))
4674 tree comp_op0 = arg00;
4675 tree comp_op1 = arg01;
4676 tree comp_type = TREE_TYPE (comp_op0);
4678 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4679 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4681 comp_type = type;
4682 comp_op0 = arg1;
4683 comp_op1 = arg2;
4686 switch (comp_code)
4688 case EQ_EXPR:
4689 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4690 case NE_EXPR:
4691 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4692 case LE_EXPR:
4693 case LT_EXPR:
4694 case UNLE_EXPR:
4695 case UNLT_EXPR:
4696 /* In C++ a ?: expression can be an lvalue, so put the
4697 operand which will be used if they are equal first
4698 so that we can convert this back to the
4699 corresponding COND_EXPR. */
4700 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4702 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4703 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4704 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4705 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4706 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4707 comp_op1, comp_op0);
4708 return pedantic_non_lvalue_loc (loc,
4709 fold_convert_loc (loc, type, tem));
4711 break;
4712 case GE_EXPR:
4713 case GT_EXPR:
4714 case UNGE_EXPR:
4715 case UNGT_EXPR:
4716 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4718 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4719 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4720 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4721 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4722 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4723 comp_op1, comp_op0);
4724 return pedantic_non_lvalue_loc (loc,
4725 fold_convert_loc (loc, type, tem));
4727 break;
4728 case UNEQ_EXPR:
4729 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4730 return pedantic_non_lvalue_loc (loc,
4731 fold_convert_loc (loc, type, arg2));
4732 break;
4733 case LTGT_EXPR:
4734 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4735 return pedantic_non_lvalue_loc (loc,
4736 fold_convert_loc (loc, type, arg1));
4737 break;
4738 default:
4739 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4740 break;
4744 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4745 we might still be able to simplify this. For example,
4746 if C1 is one less or one more than C2, this might have started
4747 out as a MIN or MAX and been transformed by this function.
4748 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4750 if (INTEGRAL_TYPE_P (type)
4751 && TREE_CODE (arg01) == INTEGER_CST
4752 && TREE_CODE (arg2) == INTEGER_CST)
4753 switch (comp_code)
4755 case EQ_EXPR:
4756 if (TREE_CODE (arg1) == INTEGER_CST)
4757 break;
4758 /* We can replace A with C1 in this case. */
4759 arg1 = fold_convert_loc (loc, type, arg01);
4760 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4762 case LT_EXPR:
4763 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4764 MIN_EXPR, to preserve the signedness of the comparison. */
4765 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4766 OEP_ONLY_CONST)
4767 && operand_equal_p (arg01,
4768 const_binop (PLUS_EXPR, arg2,
4769 build_int_cst (type, 1)),
4770 OEP_ONLY_CONST))
4772 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4773 fold_convert_loc (loc, TREE_TYPE (arg00),
4774 arg2));
4775 return pedantic_non_lvalue_loc (loc,
4776 fold_convert_loc (loc, type, tem));
4778 break;
4780 case LE_EXPR:
4781 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4782 as above. */
4783 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4784 OEP_ONLY_CONST)
4785 && operand_equal_p (arg01,
4786 const_binop (MINUS_EXPR, arg2,
4787 build_int_cst (type, 1)),
4788 OEP_ONLY_CONST))
4790 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4791 fold_convert_loc (loc, TREE_TYPE (arg00),
4792 arg2));
4793 return pedantic_non_lvalue_loc (loc,
4794 fold_convert_loc (loc, type, tem));
4796 break;
4798 case GT_EXPR:
4799 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4800 MAX_EXPR, to preserve the signedness of the comparison. */
4801 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4802 OEP_ONLY_CONST)
4803 && operand_equal_p (arg01,
4804 const_binop (MINUS_EXPR, arg2,
4805 build_int_cst (type, 1)),
4806 OEP_ONLY_CONST))
4808 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4809 fold_convert_loc (loc, TREE_TYPE (arg00),
4810 arg2));
4811 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4813 break;
4815 case GE_EXPR:
4816 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4817 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4818 OEP_ONLY_CONST)
4819 && operand_equal_p (arg01,
4820 const_binop (PLUS_EXPR, arg2,
4821 build_int_cst (type, 1)),
4822 OEP_ONLY_CONST))
4824 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4825 fold_convert_loc (loc, TREE_TYPE (arg00),
4826 arg2));
4827 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4829 break;
4830 case NE_EXPR:
4831 break;
4832 default:
4833 gcc_unreachable ();
4836 return NULL_TREE;
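/* The A op B ? A : B folds above, specialized to ints, where signed
   zeros and NaNs play no role (sketch only): */
static int
cond_min (int a, int b)
{
  return a <= b ? a : b;        /* min (a, b) */
}
static int
cond_max (int a, int b)
{
  return a >= b ? a : b;        /* max (a, b) */
}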
4841 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4842 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4843 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4844 false) >= 2)
4845 #endif
4847 /* EXP is some logical combination of boolean tests. See if we can
4848 merge it into some range test. Return the new tree if so. */
4850 static tree
4851 fold_range_test (location_t loc, enum tree_code code, tree type,
4852 tree op0, tree op1)
4854 int or_op = (code == TRUTH_ORIF_EXPR
4855 || code == TRUTH_OR_EXPR);
4856 int in0_p, in1_p, in_p;
4857 tree low0, low1, low, high0, high1, high;
4858 bool strict_overflow_p = false;
4859 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4860 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4861 tree tem;
4862 const char * const warnmsg = G_("assuming signed overflow does not occur "
4863 "when simplifying range test");
4865 /* If this is an OR operation, invert both sides; we will invert
4866 again at the end. */
4867 if (or_op)
4868 in0_p = ! in0_p, in1_p = ! in1_p;
4870 /* If both expressions are the same, if we can merge the ranges, and we
4871 can build the range test, return it or it inverted. If one of the
4872 ranges is always true or always false, consider it to be the same
4873 expression as the other. */
4874 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4875 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4876 in1_p, low1, high1)
4877 && 0 != (tem = (build_range_check (loc, type,
4878 lhs != 0 ? lhs
4879 : rhs != 0 ? rhs : integer_zero_node,
4880 in_p, low, high))))
4882 if (strict_overflow_p)
4883 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4884 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4887 /* On machines where the branch cost is expensive, if this is a
4888 short-circuited branch and the underlying object on both sides
4889 is the same, make a non-short-circuit operation. */
4890 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4891 && lhs != 0 && rhs != 0
4892 && (code == TRUTH_ANDIF_EXPR
4893 || code == TRUTH_ORIF_EXPR)
4894 && operand_equal_p (lhs, rhs, 0))
4896 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4897 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4898 which cases we can't do this. */
4899 if (simple_operand_p (lhs))
4900 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4901 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4902 type, op0, op1);
4904 else if (!lang_hooks.decls.global_bindings_p ()
4905 && !CONTAINS_PLACEHOLDER_P (lhs))
4907 tree common = save_expr (lhs);
4909 if (0 != (lhs = build_range_check (loc, type, common,
4910 or_op ? ! in0_p : in0_p,
4911 low0, high0))
4912 && (0 != (rhs = build_range_check (loc, type, common,
4913 or_op ? ! in1_p : in1_p,
4914 low1, high1))))
4916 if (strict_overflow_p)
4917 fold_overflow_warning (warnmsg,
4918 WARN_STRICT_OVERFLOW_COMPARISON);
4919 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4920 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4921 type, lhs, rhs);
4926 return 0;
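/* The OR handling above is De Morgan's law on range membership: invert
   both range tests, merge them as an AND would, and invert the merged
   check.  E.g. (sketch): x <= 2 || x >= 5 is the inverse of
   3 <= x && x <= 4, which is a single range check.  */
static int
outside_3_4_orig (int x)
{
  return x <= 2 || x >= 5;
}
static int
outside_3_4_folded (int x)
{
  return ! ((unsigned) x - 3u <= 1u);
}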
 4929 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
 4930 P-bit value. Arrange things so the extra bits will be set to zero if and
 4931 only if C is sign-extended to its full width. If MASK is nonzero,
 4932 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4934 static tree
4935 unextend (tree c, int p, int unsignedp, tree mask)
4937 tree type = TREE_TYPE (c);
4938 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4939 tree temp;
4941 if (p == modesize || unsignedp)
4942 return c;
4944 /* We work by getting just the sign bit into the low-order bit, then
4945 into the high-order bit, then sign-extend. We then XOR that value
4946 with C. */
4947 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4948 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4950 /* We must use a signed type in order to get an arithmetic right shift.
4951 However, we must also avoid introducing accidental overflows, so that
4952 a subsequent call to integer_zerop will work. Hence we must
4953 do the type conversion here. At this point, the constant is either
4954 zero or one, and the conversion to a signed type can never overflow.
4955 We could get an overflow if this conversion is done anywhere else. */
4956 if (TYPE_UNSIGNED (type))
4957 temp = fold_convert (signed_type_for (type), temp);
4959 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4960 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4961 if (mask != 0)
4962 temp = const_binop (BIT_AND_EXPR, temp,
4963 fold_convert (TREE_TYPE (c), mask));
4964 /* If necessary, convert the type back to match the type of C. */
4965 if (TYPE_UNSIGNED (type))
4966 temp = fold_convert (type, temp);
4968 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
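/* Sketch of the shift trick above for a P-bit field held in a 32-bit
   int (hypothetical standalone form; assumes GCC's two's-complement
   conversions and arithmetic right shift of signed values, 1 <= P <= 31): */
static int
unextend_sketch (int c, int p)
{
  /* Isolate the P-bit sign bit and move it to the top bit.  */
  int temp = (int) ((((unsigned) c >> (p - 1)) & 1u) << 31);
  /* Smear it across bits P..31 with an arithmetic shift.  */
  temp >>= 31 - p;
  /* The XOR clears those bits exactly when C was sign-extended.  */
  return c ^ temp;
}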
4971 /* For an expression that has the form
 4972 (A && B) || ~B
 4973 or
 4974 (A || B) && ~B,
 4975 we can drop one of the inner expressions and simplify to
 4976 A || ~B
 4977 or
 4978 A && ~B
4979 LOC is the location of the resulting expression. OP is the inner
4980 logical operation; the left-hand side in the examples above, while CMPOP
4981 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4982 removing a condition that guards another, as in
4983 (A != NULL && A->...) || A == NULL
4984 which we must not transform. If RHS_ONLY is true, only eliminate the
4985 right-most operand of the inner logical operation. */
4987 static tree
4988 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4989 bool rhs_only)
4991 tree type = TREE_TYPE (cmpop);
4992 enum tree_code code = TREE_CODE (cmpop);
4993 enum tree_code truthop_code = TREE_CODE (op);
4994 tree lhs = TREE_OPERAND (op, 0);
4995 tree rhs = TREE_OPERAND (op, 1);
4996 tree orig_lhs = lhs, orig_rhs = rhs;
4997 enum tree_code rhs_code = TREE_CODE (rhs);
4998 enum tree_code lhs_code = TREE_CODE (lhs);
4999 enum tree_code inv_code;
5001 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5002 return NULL_TREE;
5004 if (TREE_CODE_CLASS (code) != tcc_comparison)
5005 return NULL_TREE;
5007 if (rhs_code == truthop_code)
5009 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5010 if (newrhs != NULL_TREE)
5012 rhs = newrhs;
5013 rhs_code = TREE_CODE (rhs);
5016 if (lhs_code == truthop_code && !rhs_only)
5018 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5019 if (newlhs != NULL_TREE)
5021 lhs = newlhs;
5022 lhs_code = TREE_CODE (lhs);
5026 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5027 if (inv_code == rhs_code
5028 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5029 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5030 return lhs;
5031 if (!rhs_only && inv_code == lhs_code
5032 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5033 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5034 return rhs;
5035 if (rhs != orig_rhs || lhs != orig_lhs)
5036 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5037 lhs, rhs);
5038 return NULL_TREE;
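/* Sketch of the simplification above on plain truth values
   (illustration only): dropping B from the inner expression is safe
   because whenever B is false, the ~B arm already makes the whole
   result true.  */
static int
or_opposite_orig (int a, int b)
{
  return (a && b) || !b;
}
static int
or_opposite_simpl (int a, int b)
{
  return a || !b;
}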
5041 /* Find ways of folding logical expressions of LHS and RHS:
5042 Try to merge two comparisons to the same innermost item.
5043 Look for range tests like "ch >= '0' && ch <= '9'".
5044 Look for combinations of simple terms on machines with expensive branches
5045 and evaluate the RHS unconditionally.
5047 For example, if we have p->a == 2 && p->b == 4 and we can make an
5048 object large enough to span both A and B, we can do this with a comparison
5049 against the object ANDed with the a mask.
5051 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5052 operations to do this with one comparison.
 5054 We check for both normal comparisons and the BIT_AND_EXPRs made by this
 5055 function and the one above.
5057 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5058 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5060 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5061 two operands.
5063 We return the simplified tree or 0 if no optimization is possible. */
5065 static tree
5066 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5067 tree lhs, tree rhs)
5069 /* If this is the "or" of two comparisons, we can do something if
5070 the comparisons are NE_EXPR. If this is the "and", we can do something
5071 if the comparisons are EQ_EXPR. I.e.,
5072 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5074 WANTED_CODE is this operation code. For single bit fields, we can
5075 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5076 comparison for one-bit fields. */
5078 enum tree_code wanted_code;
5079 enum tree_code lcode, rcode;
5080 tree ll_arg, lr_arg, rl_arg, rr_arg;
5081 tree ll_inner, lr_inner, rl_inner, rr_inner;
5082 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5083 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5084 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5085 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5086 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5087 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5088 enum machine_mode lnmode, rnmode;
5089 tree ll_mask, lr_mask, rl_mask, rr_mask;
5090 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5091 tree l_const, r_const;
5092 tree lntype, rntype, result;
5093 HOST_WIDE_INT first_bit, end_bit;
5094 int volatilep;
5096 /* Start by getting the comparison codes. Fail if anything is volatile.
5097 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5098 it were surrounded with a NE_EXPR. */
5100 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5101 return 0;
5103 lcode = TREE_CODE (lhs);
5104 rcode = TREE_CODE (rhs);
5106 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5108 lhs = build2 (NE_EXPR, truth_type, lhs,
5109 build_int_cst (TREE_TYPE (lhs), 0));
5110 lcode = NE_EXPR;
5113 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5115 rhs = build2 (NE_EXPR, truth_type, rhs,
5116 build_int_cst (TREE_TYPE (rhs), 0));
5117 rcode = NE_EXPR;
5120 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5121 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5122 return 0;
5124 ll_arg = TREE_OPERAND (lhs, 0);
5125 lr_arg = TREE_OPERAND (lhs, 1);
5126 rl_arg = TREE_OPERAND (rhs, 0);
5127 rr_arg = TREE_OPERAND (rhs, 1);
5129 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5130 if (simple_operand_p (ll_arg)
5131 && simple_operand_p (lr_arg))
5133 if (operand_equal_p (ll_arg, rl_arg, 0)
5134 && operand_equal_p (lr_arg, rr_arg, 0))
5136 result = combine_comparisons (loc, code, lcode, rcode,
5137 truth_type, ll_arg, lr_arg);
5138 if (result)
5139 return result;
5141 else if (operand_equal_p (ll_arg, rr_arg, 0)
5142 && operand_equal_p (lr_arg, rl_arg, 0))
5144 result = combine_comparisons (loc, code, lcode,
5145 swap_tree_comparison (rcode),
5146 truth_type, ll_arg, lr_arg);
5147 if (result)
5148 return result;
5152 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5153 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5155 /* If the RHS can be evaluated unconditionally and its operands are
5156 simple, it wins to evaluate the RHS unconditionally on machines
5157 with expensive branches. In this case, this isn't a comparison
5158 that can be merged. */
5160 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5161 false) >= 2
5162 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5163 && simple_operand_p (rl_arg)
5164 && simple_operand_p (rr_arg))
5166 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5167 if (code == TRUTH_OR_EXPR
5168 && lcode == NE_EXPR && integer_zerop (lr_arg)
5169 && rcode == NE_EXPR && integer_zerop (rr_arg)
5170 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5171 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5172 return build2_loc (loc, NE_EXPR, truth_type,
5173 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5174 ll_arg, rl_arg),
5175 build_int_cst (TREE_TYPE (ll_arg), 0));
5177 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5178 if (code == TRUTH_AND_EXPR
5179 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5180 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5181 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5182 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5183 return build2_loc (loc, EQ_EXPR, truth_type,
5184 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5185 ll_arg, rl_arg),
5186 build_int_cst (TREE_TYPE (ll_arg), 0));
5189 /* See if the comparisons can be merged. Then get all the parameters for
5190 each side. */
5192 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5193 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5194 return 0;
5196 volatilep = 0;
5197 ll_inner = decode_field_reference (loc, ll_arg,
5198 &ll_bitsize, &ll_bitpos, &ll_mode,
5199 &ll_unsignedp, &volatilep, &ll_mask,
5200 &ll_and_mask);
5201 lr_inner = decode_field_reference (loc, lr_arg,
5202 &lr_bitsize, &lr_bitpos, &lr_mode,
5203 &lr_unsignedp, &volatilep, &lr_mask,
5204 &lr_and_mask);
5205 rl_inner = decode_field_reference (loc, rl_arg,
5206 &rl_bitsize, &rl_bitpos, &rl_mode,
5207 &rl_unsignedp, &volatilep, &rl_mask,
5208 &rl_and_mask);
5209 rr_inner = decode_field_reference (loc, rr_arg,
5210 &rr_bitsize, &rr_bitpos, &rr_mode,
5211 &rr_unsignedp, &volatilep, &rr_mask,
5212 &rr_and_mask);
 5214 /* The inner operation on the lhs of each comparison must be the
 5215 same if we are to be able to do anything.
 5216 Then see if we have constants. If not, the same must be true for
 5217 the rhs's. */
5218 if (volatilep || ll_inner == 0 || rl_inner == 0
5219 || ! operand_equal_p (ll_inner, rl_inner, 0))
5220 return 0;
5222 if (TREE_CODE (lr_arg) == INTEGER_CST
5223 && TREE_CODE (rr_arg) == INTEGER_CST)
5224 l_const = lr_arg, r_const = rr_arg;
5225 else if (lr_inner == 0 || rr_inner == 0
5226 || ! operand_equal_p (lr_inner, rr_inner, 0))
5227 return 0;
5228 else
5229 l_const = r_const = 0;
5231 /* If either comparison code is not correct for our logical operation,
5232 fail. However, we can convert a one-bit comparison against zero into
5233 the opposite comparison against that bit being set in the field. */
5235 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5236 if (lcode != wanted_code)
5238 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5240 /* Make the left operand unsigned, since we are only interested
5241 in the value of one bit. Otherwise we are doing the wrong
5242 thing below. */
5243 ll_unsignedp = 1;
5244 l_const = ll_mask;
5246 else
5247 return 0;
5250 /* This is analogous to the code for l_const above. */
5251 if (rcode != wanted_code)
5253 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5255 rl_unsignedp = 1;
5256 r_const = rl_mask;
5258 else
5259 return 0;
5262 /* See if we can find a mode that contains both fields being compared on
5263 the left. If we can't, fail. Otherwise, update all constants and masks
5264 to be relative to a field of that size. */
5265 first_bit = MIN (ll_bitpos, rl_bitpos);
5266 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5267 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5268 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5269 volatilep);
5270 if (lnmode == VOIDmode)
5271 return 0;
5273 lnbitsize = GET_MODE_BITSIZE (lnmode);
5274 lnbitpos = first_bit & ~ (lnbitsize - 1);
5275 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5276 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5278 if (BYTES_BIG_ENDIAN)
5280 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5281 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5284 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5285 size_int (xll_bitpos));
5286 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5287 size_int (xrl_bitpos));
5289 if (l_const)
5291 l_const = fold_convert_loc (loc, lntype, l_const);
5292 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5293 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5294 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5295 fold_build1_loc (loc, BIT_NOT_EXPR,
5296 lntype, ll_mask))))
5298 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5300 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5303 if (r_const)
5305 r_const = fold_convert_loc (loc, lntype, r_const);
5306 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5307 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5308 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5309 fold_build1_loc (loc, BIT_NOT_EXPR,
5310 lntype, rl_mask))))
5312 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5314 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
 5318 /* If the right sides are not constant, do the same for them. Also,
 5319 disallow this optimization if a size or signedness mismatch occurs
 5320 between the left and right sides. */
5321 if (l_const == 0)
5323 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5324 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5325 /* Make sure the two fields on the right
5326 correspond to the left without being swapped. */
5327 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5328 return 0;
5330 first_bit = MIN (lr_bitpos, rr_bitpos);
5331 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5332 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5333 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5334 volatilep);
5335 if (rnmode == VOIDmode)
5336 return 0;
5338 rnbitsize = GET_MODE_BITSIZE (rnmode);
5339 rnbitpos = first_bit & ~ (rnbitsize - 1);
5340 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5341 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5343 if (BYTES_BIG_ENDIAN)
5345 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5346 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5349 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5350 rntype, lr_mask),
5351 size_int (xlr_bitpos));
5352 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5353 rntype, rr_mask),
5354 size_int (xrr_bitpos));
5356 /* Make a mask that corresponds to both fields being compared.
5357 Do this for both items being compared. If the operands are the
5358 same size and the bits being compared are in the same position
5359 then we can do this by masking both and comparing the masked
5360 results. */
5361 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5362 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5363 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5365 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5366 ll_unsignedp || rl_unsignedp);
5367 if (! all_ones_mask_p (ll_mask, lnbitsize))
5368 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5370 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5371 lr_unsignedp || rr_unsignedp);
5372 if (! all_ones_mask_p (lr_mask, rnbitsize))
5373 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5375 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5378 /* There is still another way we can do something: If both pairs of
5379 fields being compared are adjacent, we may be able to make a wider
5380 field containing them both.
5382 Note that we still must mask the lhs/rhs expressions. Furthermore,
5383 the mask must be shifted to account for the shift done by
5384 make_bit_field_ref. */
5385 if ((ll_bitsize + ll_bitpos == rl_bitpos
5386 && lr_bitsize + lr_bitpos == rr_bitpos)
5387 || (ll_bitpos == rl_bitpos + rl_bitsize
5388 && lr_bitpos == rr_bitpos + rr_bitsize))
5390 tree type;
5392 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5393 ll_bitsize + rl_bitsize,
5394 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5395 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5396 lr_bitsize + rr_bitsize,
5397 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5399 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5400 size_int (MIN (xll_bitpos, xrl_bitpos)));
5401 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5402 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5404 /* Convert to the smaller type before masking out unwanted bits. */
5405 type = lntype;
5406 if (lntype != rntype)
5408 if (lnbitsize > rnbitsize)
5410 lhs = fold_convert_loc (loc, rntype, lhs);
5411 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5412 type = rntype;
5414 else if (lnbitsize < rnbitsize)
5416 rhs = fold_convert_loc (loc, lntype, rhs);
5417 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5418 type = lntype;
5422 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5423 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5425 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5426 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5428 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5431 return 0;
5434 /* Handle the case of comparisons with constants. If there is something in
5435 common between the masks, those bits of the constants must be the same.
5436 If not, the condition is always false. Test for this to avoid generating
5437 incorrect code below. */
5438 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5439 if (! integer_zerop (result)
5440 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5441 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5443 if (wanted_code == NE_EXPR)
5445 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5446 return constant_boolean_node (true, truth_type);
5448 else
5450 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5451 return constant_boolean_node (false, truth_type);
5455 /* Construct the expression we will return. First get the component
5456 reference we will make. Unless the mask is all ones the width of
5457 that field, perform the mask operation. Then compare with the
5458 merged constant. */
5459 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5460 ll_unsignedp || rl_unsignedp);
5462 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5463 if (! all_ones_mask_p (ll_mask, lnbitsize))
5464 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5466 return build2_loc (loc, wanted_code, truth_type, result,
5467 const_binop (BIT_IOR_EXPR, l_const, r_const));
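/* As a concrete illustration of the merge above: for adjacent
   bit-fields A and B living in the same word, a test such as
   "x.a == 3 && x.b == 5" can become a single comparison of the form
   (word & (mask_a | mask_b)) == ((3 << shift_a) | (5 << shift_b)),
   where the masks and shifts depend on the target's bit-field
   layout.  */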
5470 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5471 constant. */
5473 static tree
5474 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5475 tree op0, tree op1)
5477 tree arg0 = op0;
5478 enum tree_code op_code;
5479 tree comp_const;
5480 tree minmax_const;
5481 int consts_equal, consts_lt;
5482 tree inner;
5484 STRIP_SIGN_NOPS (arg0);
5486 op_code = TREE_CODE (arg0);
5487 minmax_const = TREE_OPERAND (arg0, 1);
5488 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5489 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5490 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5491 inner = TREE_OPERAND (arg0, 0);
5493 /* If something does not permit us to optimize, return the original tree. */
5494 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5495 || TREE_CODE (comp_const) != INTEGER_CST
5496 || TREE_OVERFLOW (comp_const)
5497 || TREE_CODE (minmax_const) != INTEGER_CST
5498 || TREE_OVERFLOW (minmax_const))
5499 return NULL_TREE;
5501 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5502 and GT_EXPR, doing the rest with recursive calls using logical
5503 simplifications. */
5504 switch (code)
5506 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5508 tree tem
5509 = optimize_minmax_comparison (loc,
5510 invert_tree_comparison (code, false),
5511 type, op0, op1);
5512 if (tem)
5513 return invert_truthvalue_loc (loc, tem);
5514 return NULL_TREE;
5517 case GE_EXPR:
5518 return
5519 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5520 optimize_minmax_comparison
5521 (loc, EQ_EXPR, type, arg0, comp_const),
5522 optimize_minmax_comparison
5523 (loc, GT_EXPR, type, arg0, comp_const));
5525 case EQ_EXPR:
5526 if (op_code == MAX_EXPR && consts_equal)
5527 /* MAX (X, 0) == 0 -> X <= 0 */
5528 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5530 else if (op_code == MAX_EXPR && consts_lt)
5531 /* MAX (X, 0) == 5 -> X == 5 */
5532 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5534 else if (op_code == MAX_EXPR)
5535 /* MAX (X, 0) == -1 -> false */
5536 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5538 else if (consts_equal)
5539 /* MIN (X, 0) == 0 -> X >= 0 */
5540 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5542 else if (consts_lt)
5543 /* MIN (X, 0) == 5 -> false */
5544 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5546 else
5547 /* MIN (X, 0) == -1 -> X == -1 */
5548 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5550 case GT_EXPR:
5551 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5552 /* MAX (X, 0) > 0 -> X > 0
5553 MAX (X, 0) > 5 -> X > 5 */
5554 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5556 else if (op_code == MAX_EXPR)
5557 /* MAX (X, 0) > -1 -> true */
5558 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5560 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5561 /* MIN (X, 0) > 0 -> false
5562 MIN (X, 0) > 5 -> false */
5563 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5565 else
5566 /* MIN (X, 0) > -1 -> X > -1 */
5567 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5569 default:
5570 return NULL_TREE;
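/* For example, GE_EXPR decomposes via the rules above:
   MAX (X, 4) >= 4 becomes (MAX (X, 4) == 4) || (MAX (X, 4) > 4),
   i.e. (X <= 4) || (X > 4), which later folding can reduce to true.  */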
5574 /* T is an integer expression that is being multiplied or divided by, or
5575 reduced modulo, a constant C (CODE says which operation and what kind
5576 of divide or modulus). See if we can eliminate that operation by folding it with
5577 other operations already in T. WIDE_TYPE, if non-null, is a type that
5578 should be used for the computation if wider than our type.
5580 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5581 (X * 2) + (Y * 4). We must, however, be assured that either the original
5582 expression would not overflow or that overflow is undefined for the type
5583 in the language in question.
5585 If we return a non-null expression, it is an equivalent form of the
5586 original computation, but need not be in the original type.
5588 We set *STRICT_OVERFLOW_P to true if the return value depends on
5589 signed overflow being undefined. Otherwise we do not change
5590 *STRICT_OVERFLOW_P. */
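/* For instance, (X * 4) / 4 can only be simplified to X for signed X,
   because signed overflow is undefined there (for unsigned X the
   multiplication may wrap); *STRICT_OVERFLOW_P records that dependence
   so callers can emit -Wstrict-overflow diagnostics.  */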
5592 static tree
5593 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5594 bool *strict_overflow_p)
5596 /* To avoid exponential search depth, refuse to allow recursion past
5597 three levels. Beyond that (1) it's highly unlikely that we'll find
5598 something interesting and (2) we've probably processed it before
5599 when we built the inner expression. */
5601 static int depth;
5602 tree ret;
5604 if (depth > 3)
5605 return NULL;
5607 depth++;
5608 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5609 depth--;
5611 return ret;
5614 static tree
5615 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5616 bool *strict_overflow_p)
5618 tree type = TREE_TYPE (t);
5619 enum tree_code tcode = TREE_CODE (t);
5620 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5621 > GET_MODE_SIZE (TYPE_MODE (type)))
5622 ? wide_type : type);
5623 tree t1, t2;
5624 int same_p = tcode == code;
5625 tree op0 = NULL_TREE, op1 = NULL_TREE;
5626 bool sub_strict_overflow_p;
5628 /* Don't deal with constants of zero here; they confuse the code below. */
5629 if (integer_zerop (c))
5630 return NULL_TREE;
5632 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5633 op0 = TREE_OPERAND (t, 0);
5635 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5636 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5638 /* Note that we need not handle conditional operations here since fold
5639 already handles those cases. So just do arithmetic here. */
5640 switch (tcode)
5642 case INTEGER_CST:
5643 /* For a constant, we can always simplify if CODE is a multiply
5644 or (for divide and modulus) if it is a multiple of our constant. */
5645 if (code == MULT_EXPR
5646 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5647 return const_binop (code, fold_convert (ctype, t),
5648 fold_convert (ctype, c));
5649 break;
5651 CASE_CONVERT: case NON_LVALUE_EXPR:
5652 /* If op0 is an expression ... */
5653 if ((COMPARISON_CLASS_P (op0)
5654 || UNARY_CLASS_P (op0)
5655 || BINARY_CLASS_P (op0)
5656 || VL_EXP_CLASS_P (op0)
5657 || EXPRESSION_CLASS_P (op0))
5658 /* ... and has wrapping overflow, and its type is smaller
5659 than ctype, then we cannot pass through as widening. */
5660 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5661 && (TYPE_PRECISION (ctype)
5662 > TYPE_PRECISION (TREE_TYPE (op0))))
5663 /* ... or this is a truncation (t is narrower than op0),
5664 then we cannot pass through this narrowing. */
5665 || (TYPE_PRECISION (type)
5666 < TYPE_PRECISION (TREE_TYPE (op0)))
5667 /* ... or signedness changes for division or modulus,
5668 then we cannot pass through this conversion. */
5669 || (code != MULT_EXPR
5670 && (TYPE_UNSIGNED (ctype)
5671 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5672 /* ... or has undefined overflow while the converted to
5673 type has not, we cannot do the operation in the inner type
5674 as that would introduce undefined overflow. */
5675 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5676 && !TYPE_OVERFLOW_UNDEFINED (type))))
5677 break;
5679 /* Pass the constant down and see if we can make a simplification. If
5680 we can, replace this expression with the inner simplification for
5681 possible later conversion to our or some other type. */
5682 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5683 && TREE_CODE (t2) == INTEGER_CST
5684 && !TREE_OVERFLOW (t2)
5685 && (0 != (t1 = extract_muldiv (op0, t2, code,
5686 code == MULT_EXPR
5687 ? ctype : NULL_TREE,
5688 strict_overflow_p))))
5689 return t1;
5690 break;
5692 case ABS_EXPR:
5693 /* If widening the type changes it from signed to unsigned, then we
5694 must avoid building ABS_EXPR itself as unsigned. */
5695 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5697 tree cstype = (*signed_type_for) (ctype);
5698 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5699 != 0)
5701 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5702 return fold_convert (ctype, t1);
5704 break;
5706 /* If the constant is negative, we cannot simplify this. */
5707 if (tree_int_cst_sgn (c) == -1)
5708 break;
5709 /* FALLTHROUGH */
5710 case NEGATE_EXPR:
5711 /* For division and modulus, type can't be unsigned, as e.g.
5712 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5713 For signed types, even with wrapping overflow, this is fine. */
5714 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5715 break;
5716 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5717 != 0)
5718 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5719 break;
5721 case MIN_EXPR: case MAX_EXPR:
5722 /* If widening the type changes the signedness, then we can't perform
5723 this optimization as that changes the result. */
5724 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5725 break;
5727 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5728 sub_strict_overflow_p = false;
5729 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5730 &sub_strict_overflow_p)) != 0
5731 && (t2 = extract_muldiv (op1, c, code, wide_type,
5732 &sub_strict_overflow_p)) != 0)
5734 if (tree_int_cst_sgn (c) < 0)
5735 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5736 if (sub_strict_overflow_p)
5737 *strict_overflow_p = true;
5738 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5739 fold_convert (ctype, t2));
5741 break;
5743 case LSHIFT_EXPR: case RSHIFT_EXPR:
5744 /* If the second operand is constant, this is a multiplication
5745 or floor division by a power of two, so we can treat it that
5746 way unless the multiplier or divisor overflows. Signed
5747 left-shift overflow is implementation-defined rather than
5748 undefined in C90, so do not convert signed left shift into
5749 multiplication. */
5750 if (TREE_CODE (op1) == INTEGER_CST
5751 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5752 /* const_binop may not detect overflow correctly,
5753 so check for it explicitly here. */
5754 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5755 && TREE_INT_CST_HIGH (op1) == 0
5756 && 0 != (t1 = fold_convert (ctype,
5757 const_binop (LSHIFT_EXPR,
5758 size_one_node,
5759 op1)))
5760 && !TREE_OVERFLOW (t1))
5761 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5762 ? MULT_EXPR : FLOOR_DIV_EXPR,
5763 ctype,
5764 fold_convert (ctype, op0),
5765 t1),
5766 c, code, wide_type, strict_overflow_p);
5767 break;
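/* E.g. an unsigned X << 3 is rewritten here as X * 8, and X >> 3 as
   the floor division X / 8, before recursing on the rewritten form.  */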
5769 case PLUS_EXPR: case MINUS_EXPR:
5770 /* See if we can eliminate the operation on both sides. If we can, we
5771 can return a new PLUS or MINUS. If we can't, the only remaining
5772 cases where we can do anything are if the second operand is a
5773 constant. */
5774 sub_strict_overflow_p = false;
5775 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5776 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5777 if (t1 != 0 && t2 != 0
5778 && (code == MULT_EXPR
5779 /* If not multiplication, we can only do this if both operands
5780 are divisible by c. */
5781 || (multiple_of_p (ctype, op0, c)
5782 && multiple_of_p (ctype, op1, c))))
5784 if (sub_strict_overflow_p)
5785 *strict_overflow_p = true;
5786 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5787 fold_convert (ctype, t2));
5790 /* If this was a subtraction, negate OP1 and set it to be an addition.
5791 This simplifies the logic below. */
5792 if (tcode == MINUS_EXPR)
5794 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5795 /* If OP1 was not easily negatable, the constant may be OP0. */
5796 if (TREE_CODE (op0) == INTEGER_CST)
5798 tree tem = op0;
5799 op0 = op1;
5800 op1 = tem;
5801 tem = t1;
5802 t1 = t2;
5803 t2 = tem;
5807 if (TREE_CODE (op1) != INTEGER_CST)
5808 break;
5810 /* If either OP1 or C is negative, this optimization is not safe for
5811 some of the division and remainder types while for others we need
5812 to change the code. */
5813 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5815 if (code == CEIL_DIV_EXPR)
5816 code = FLOOR_DIV_EXPR;
5817 else if (code == FLOOR_DIV_EXPR)
5818 code = CEIL_DIV_EXPR;
5819 else if (code != MULT_EXPR
5820 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5821 break;
5824 /* If this is a multiply, or a division/modulus of a multiple of our
5825 constant, do the operation and verify it doesn't overflow. */
5826 if (code == MULT_EXPR
5827 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5829 op1 = const_binop (code, fold_convert (ctype, op1),
5830 fold_convert (ctype, c));
5831 /* We allow the constant to overflow with wrapping semantics. */
5832 if (op1 == 0
5833 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5834 break;
5836 else
5837 break;
5839 /* If we have an unsigned type, we cannot widen the operation since it
5840 will change the result if the original computation overflowed. */
5841 if (TYPE_UNSIGNED (ctype) && ctype != type)
5842 break;
5844 /* If we were able to eliminate our operation from the first side,
5845 apply our operation to the second side and reform the PLUS. */
5846 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5847 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5849 /* The last case is when CODE is a multiply. In that case, we can
5850 apply the distributive law to commute the multiply and addition
5851 if the multiplication of the constants doesn't overflow. */
5852 if (code == MULT_EXPR)
5853 return fold_build2 (tcode, ctype,
5854 fold_build2 (code, ctype,
5855 fold_convert (ctype, op0),
5856 fold_convert (ctype, c)),
5857 op1);
5859 break;
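/* A worked example of the both-sides case: for signed X,
   (X * 6 + 4) / 2 simplifies to X * 3 + 2, relying on signed overflow
   being undefined for the (X * 6) / 2 half.  */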
5861 case MULT_EXPR:
5862 /* We have a special case here if we are doing something like
5863 (C * 8) % 4 since we know that's zero. */
5864 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5865 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5866 /* If the multiplication can overflow we cannot optimize this. */
5867 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5868 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5869 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5871 *strict_overflow_p = true;
5872 return omit_one_operand (type, integer_zero_node, op0);
5875 /* ... fall through ... */
5877 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5878 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5879 /* If we can extract our operation from the LHS, do so and return a
5880 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5881 do something only if the second operand is a constant. */
5882 if (same_p
5883 && (t1 = extract_muldiv (op0, c, code, wide_type,
5884 strict_overflow_p)) != 0)
5885 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5886 fold_convert (ctype, op1));
5887 else if (tcode == MULT_EXPR && code == MULT_EXPR
5888 && (t1 = extract_muldiv (op1, c, code, wide_type,
5889 strict_overflow_p)) != 0)
5890 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5891 fold_convert (ctype, t1));
5892 else if (TREE_CODE (op1) != INTEGER_CST)
5893 return 0;
5895 /* If these are the same operation types, we can associate them
5896 assuming no overflow. */
5897 if (tcode == code)
5899 double_int mul;
5900 bool overflow_p;
5901 unsigned prec = TYPE_PRECISION (ctype);
5902 bool uns = TYPE_UNSIGNED (ctype);
5903 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5904 double_int dic = tree_to_double_int (c).ext (prec, uns);
5905 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5906 overflow_p = ((!uns && overflow_p)
5907 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5908 if (!double_int_fits_to_tree_p (ctype, mul)
5909 && ((uns && tcode != MULT_EXPR) || !uns))
5910 overflow_p = 1;
5911 if (!overflow_p)
5912 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5913 double_int_to_tree (ctype, mul));
5916 /* If these operations "cancel" each other, we have the main
5917 optimizations of this pass, which occur when either constant is a
5918 multiple of the other, in which case we replace this with either an
5919 operation of CODE or TCODE.
5921 If we have an unsigned type, we cannot do this since it will change
5922 the result if the original computation overflowed. */
5923 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5924 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5925 || (tcode == MULT_EXPR
5926 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5927 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5928 && code != MULT_EXPR)))
5930 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5932 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5933 *strict_overflow_p = true;
5934 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5935 fold_convert (ctype,
5936 const_binop (TRUNC_DIV_EXPR,
5937 op1, c)));
5939 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5941 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5942 *strict_overflow_p = true;
5943 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5944 fold_convert (ctype,
5945 const_binop (TRUNC_DIV_EXPR,
5946 c, op1)));
5949 break;
5951 default:
5952 break;
5955 return 0;
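/* Illustrating the cancellation above: with signed arithmetic,
   (X * 12) / 4 folds to X * 3, and (X * 4) / 12 folds to X / 3; both
   set *STRICT_OVERFLOW_P since they depend on signed overflow being
   undefined.  */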
5958 /* Return a node which has the indicated constant VALUE (either 0 or
5959 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5960 and is of the indicated TYPE. */
5962 tree
5963 constant_boolean_node (bool value, tree type)
5965 if (type == integer_type_node)
5966 return value ? integer_one_node : integer_zero_node;
5967 else if (type == boolean_type_node)
5968 return value ? boolean_true_node : boolean_false_node;
5969 else if (TREE_CODE (type) == VECTOR_TYPE)
5970 return build_vector_from_val (type,
5971 build_int_cst (TREE_TYPE (type),
5972 value ? -1 : 0));
5973 else
5974 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5978 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5979 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5980 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5981 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5982 COND is the first argument to CODE; otherwise (as in the example
5983 given here), it is the second argument. TYPE is the type of the
5984 original expression. Return NULL_TREE if no simplification is
5985 possible. */
5987 static tree
5988 fold_binary_op_with_conditional_arg (location_t loc,
5989 enum tree_code code,
5990 tree type, tree op0, tree op1,
5991 tree cond, tree arg, int cond_first_p)
5993 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5994 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5995 tree test, true_value, false_value;
5996 tree lhs = NULL_TREE;
5997 tree rhs = NULL_TREE;
5998 enum tree_code cond_code = COND_EXPR;
6000 if (TREE_CODE (cond) == COND_EXPR
6001 || TREE_CODE (cond) == VEC_COND_EXPR)
6003 test = TREE_OPERAND (cond, 0);
6004 true_value = TREE_OPERAND (cond, 1);
6005 false_value = TREE_OPERAND (cond, 2);
6006 /* If this arm of the conditional is a throw expression (its type
6007 is void), then it does not make sense to try to perform a logical
6008 or arithmetic operation involving it. */
6009 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6010 lhs = true_value;
6011 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6012 rhs = false_value;
6014 else
6016 tree testtype = TREE_TYPE (cond);
6017 test = cond;
6018 true_value = constant_boolean_node (true, testtype);
6019 false_value = constant_boolean_node (false, testtype);
6022 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6023 cond_code = VEC_COND_EXPR;
6025 /* This transformation is only worthwhile if we don't have to wrap ARG
6026 in a SAVE_EXPR and the operation can be simplified without recursing
6027 on at least one of the branches once it's pushed inside the COND_EXPR. */
6028 if (!TREE_CONSTANT (arg)
6029 && (TREE_SIDE_EFFECTS (arg)
6030 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6031 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6032 return NULL_TREE;
6034 arg = fold_convert_loc (loc, arg_type, arg);
6035 if (lhs == 0)
6037 true_value = fold_convert_loc (loc, cond_type, true_value);
6038 if (cond_first_p)
6039 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6040 else
6041 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6043 if (rhs == 0)
6045 false_value = fold_convert_loc (loc, cond_type, false_value);
6046 if (cond_first_p)
6047 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6048 else
6049 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6052 /* Check that we have simplified at least one of the branches. */
6053 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6054 return NULL_TREE;
6056 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
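/* For example, 5 + (b ? 3 : 4) distributes to b ? 8 : 9 here, since
   ARG is constant and both new branches simplify.  */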
6060 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6062 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6063 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6064 ADDEND is the same as X.
6066 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6067 and finite. The problematic cases are when X is zero, and its mode
6068 has signed zeros. In the case of rounding towards -infinity,
6069 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6070 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6072 bool
6073 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6075 if (!real_zerop (addend))
6076 return false;
6078 /* Don't allow the fold with -fsignaling-nans. */
6079 if (HONOR_SNANS (TYPE_MODE (type)))
6080 return false;
6082 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6083 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6084 return true;
6086 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6087 if (TREE_CODE (addend) == REAL_CST
6088 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6089 negate = !negate;
6091 /* The mode has signed zeros, and we have to honor their sign.
6092 In this situation, there is only one case we can return true for.
6093 X - 0 is the same as X unless rounding towards -infinity is
6094 in effect. */
6095 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6098 /* Subroutine of fold() that checks comparisons of built-in math
6099 functions against real constants.
6101 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6102 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6103 is the type of the result and ARG0 and ARG1 are the operands of the
6104 comparison. ARG1 must be a TREE_REAL_CST.
6106 The function returns the constant folded tree if a simplification
6107 can be made, and NULL_TREE otherwise. */
6109 static tree
6110 fold_mathfn_compare (location_t loc,
6111 enum built_in_function fcode, enum tree_code code,
6112 tree type, tree arg0, tree arg1)
6114 REAL_VALUE_TYPE c;
6116 if (BUILTIN_SQRT_P (fcode))
6118 tree arg = CALL_EXPR_ARG (arg0, 0);
6119 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6121 c = TREE_REAL_CST (arg1);
6122 if (REAL_VALUE_NEGATIVE (c))
6124 /* sqrt(x) < y is always false, if y is negative. */
6125 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6126 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6128 /* sqrt(x) > y is always true, if y is negative and we
6129 don't care about NaNs, i.e. negative values of x. */
6130 if (code == NE_EXPR || !HONOR_NANS (mode))
6131 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6133 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6134 return fold_build2_loc (loc, GE_EXPR, type, arg,
6135 build_real (TREE_TYPE (arg), dconst0));
6137 else if (code == GT_EXPR || code == GE_EXPR)
6139 REAL_VALUE_TYPE c2;
6141 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6142 real_convert (&c2, mode, &c2);
6144 if (REAL_VALUE_ISINF (c2))
6146 /* sqrt(x) > y is x == +Inf, when y is very large. */
6147 if (HONOR_INFINITIES (mode))
6148 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6149 build_real (TREE_TYPE (arg), c2));
6151 /* sqrt(x) > y is always false, when y is very large
6152 and we don't care about infinities. */
6153 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6156 /* sqrt(x) > c is the same as x > c*c. */
6157 return fold_build2_loc (loc, code, type, arg,
6158 build_real (TREE_TYPE (arg), c2));
6160 else if (code == LT_EXPR || code == LE_EXPR)
6162 REAL_VALUE_TYPE c2;
6164 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6165 real_convert (&c2, mode, &c2);
6167 if (REAL_VALUE_ISINF (c2))
6169 /* sqrt(x) < y is always true, when y is a very large
6170 value and we don't care about NaNs or Infinities. */
6171 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6172 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6174 /* sqrt(x) < y is x != +Inf when y is very large and we
6175 don't care about NaNs. */
6176 if (! HONOR_NANS (mode))
6177 return fold_build2_loc (loc, NE_EXPR, type, arg,
6178 build_real (TREE_TYPE (arg), c2));
6180 /* sqrt(x) < y is x >= 0 when y is very large and we
6181 don't care about Infinities. */
6182 if (! HONOR_INFINITIES (mode))
6183 return fold_build2_loc (loc, GE_EXPR, type, arg,
6184 build_real (TREE_TYPE (arg), dconst0));
6186 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6187 arg = save_expr (arg);
6188 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6189 fold_build2_loc (loc, GE_EXPR, type, arg,
6190 build_real (TREE_TYPE (arg),
6191 dconst0)),
6192 fold_build2_loc (loc, NE_EXPR, type, arg,
6193 build_real (TREE_TYPE (arg),
6194 c2)));
6197 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6198 if (! HONOR_NANS (mode))
6199 return fold_build2_loc (loc, code, type, arg,
6200 build_real (TREE_TYPE (arg), c2));
6202 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6203 arg = save_expr (arg);
6204 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6205 fold_build2_loc (loc, GE_EXPR, type, arg,
6206 build_real (TREE_TYPE (arg),
6207 dconst0)),
6208 fold_build2_loc (loc, code, type, arg,
6209 build_real (TREE_TYPE (arg),
6210 c2)));
6214 return NULL_TREE;
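/* Two sample results of the above: sqrt(x) > 2.0 becomes x > 4.0,
   and, when NaNs must be honored, sqrt(x) < 2.0 becomes
   x >= 0.0 && x < 4.0; the x >= 0.0 guard keeps NaN and negative
   inputs false, matching the original comparison.  */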
6217 /* Subroutine of fold() that optimizes comparisons against Infinities,
6218 either +Inf or -Inf.
6220 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6221 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6222 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6224 The function returns the constant folded tree if a simplification
6225 can be made, and NULL_TREE otherwise. */
6227 static tree
6228 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6229 tree arg0, tree arg1)
6231 enum machine_mode mode;
6232 REAL_VALUE_TYPE max;
6233 tree temp;
6234 bool neg;
6236 mode = TYPE_MODE (TREE_TYPE (arg0));
6238 /* For negative infinity swap the sense of the comparison. */
6239 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6240 if (neg)
6241 code = swap_tree_comparison (code);
6243 switch (code)
6245 case GT_EXPR:
6246 /* x > +Inf is always false, if we ignore sNaNs. */
6247 if (HONOR_SNANS (mode))
6248 return NULL_TREE;
6249 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6251 case LE_EXPR:
6252 /* x <= +Inf is always true, if we don't care about NaNs. */
6253 if (! HONOR_NANS (mode))
6254 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6256 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6257 arg0 = save_expr (arg0);
6258 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6260 case EQ_EXPR:
6261 case GE_EXPR:
6262 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6263 real_maxval (&max, neg, mode);
6264 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6265 arg0, build_real (TREE_TYPE (arg0), max));
6267 case LT_EXPR:
6268 /* x < +Inf is always equal to x <= DBL_MAX. */
6269 real_maxval (&max, neg, mode);
6270 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6271 arg0, build_real (TREE_TYPE (arg0), max));
6273 case NE_EXPR:
6274 /* x != +Inf is always equal to !(x > DBL_MAX). */
6275 real_maxval (&max, neg, mode);
6276 if (! HONOR_NANS (mode))
6277 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6278 arg0, build_real (TREE_TYPE (arg0), max));
6280 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6281 arg0, build_real (TREE_TYPE (arg0), max));
6282 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6284 default:
6285 break;
6288 return NULL_TREE;
6291 /* Subroutine of fold() that optimizes comparisons of a division by
6292 a nonzero integer constant against an integer constant, i.e.
6293 X/C1 op C2.
6295 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6296 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6297 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6299 The function returns the constant folded tree if a simplification
6300 can be made, and NULL_TREE otherwise. */
6302 static tree
6303 fold_div_compare (location_t loc,
6304 enum tree_code code, tree type, tree arg0, tree arg1)
6306 tree prod, tmp, hi, lo;
6307 tree arg00 = TREE_OPERAND (arg0, 0);
6308 tree arg01 = TREE_OPERAND (arg0, 1);
6309 double_int val;
6310 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6311 bool neg_overflow;
6312 bool overflow;
6314 /* We have to do this the hard way to detect unsigned overflow.
6315 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6316 val = TREE_INT_CST (arg01)
6317 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6318 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6319 neg_overflow = false;
6321 if (unsigned_p)
6323 tmp = int_const_binop (MINUS_EXPR, arg01,
6324 build_int_cst (TREE_TYPE (arg01), 1));
6325 lo = prod;
6327 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6328 val = TREE_INT_CST (prod)
6329 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6330 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6331 -1, overflow | TREE_OVERFLOW (prod));
6333 else if (tree_int_cst_sgn (arg01) >= 0)
6335 tmp = int_const_binop (MINUS_EXPR, arg01,
6336 build_int_cst (TREE_TYPE (arg01), 1));
6337 switch (tree_int_cst_sgn (arg1))
6339 case -1:
6340 neg_overflow = true;
6341 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6342 hi = prod;
6343 break;
6345 case 0:
6346 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6347 hi = tmp;
6348 break;
6350 case 1:
6351 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6352 lo = prod;
6353 break;
6355 default:
6356 gcc_unreachable ();
6359 else
6361 /* A negative divisor reverses the relational operators. */
6362 code = swap_tree_comparison (code);
6364 tmp = int_const_binop (PLUS_EXPR, arg01,
6365 build_int_cst (TREE_TYPE (arg01), 1));
6366 switch (tree_int_cst_sgn (arg1))
6368 case -1:
6369 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6370 lo = prod;
6371 break;
6373 case 0:
6374 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6375 lo = tmp;
6376 break;
6378 case 1:
6379 neg_overflow = true;
6380 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6381 hi = prod;
6382 break;
6384 default:
6385 gcc_unreachable ();
6389 switch (code)
6391 case EQ_EXPR:
6392 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6393 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6394 if (TREE_OVERFLOW (hi))
6395 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6396 if (TREE_OVERFLOW (lo))
6397 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6398 return build_range_check (loc, type, arg00, 1, lo, hi);
6400 case NE_EXPR:
6401 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6402 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6403 if (TREE_OVERFLOW (hi))
6404 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6405 if (TREE_OVERFLOW (lo))
6406 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6407 return build_range_check (loc, type, arg00, 0, lo, hi);
6409 case LT_EXPR:
6410 if (TREE_OVERFLOW (lo))
6412 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6413 return omit_one_operand_loc (loc, type, tmp, arg00);
6415 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6417 case LE_EXPR:
6418 if (TREE_OVERFLOW (hi))
6420 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6421 return omit_one_operand_loc (loc, type, tmp, arg00);
6423 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6425 case GT_EXPR:
6426 if (TREE_OVERFLOW (hi))
6428 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6429 return omit_one_operand_loc (loc, type, tmp, arg00);
6431 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6433 case GE_EXPR:
6434 if (TREE_OVERFLOW (lo))
6436 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6437 return omit_one_operand_loc (loc, type, tmp, arg00);
6439 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6441 default:
6442 break;
6445 return NULL_TREE;
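/* A worked instance: for signed X, "X / 4 == 3" yields prod = 12 and
   tmp = 3, so lo = 12 and hi = 15, and the comparison becomes the
   range check 12 <= X && X <= 15.  */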
6449 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6450 equality/inequality test, then return a simplified form of the test
6451 using a sign test. Otherwise return NULL. TYPE is the desired
6452 result type. */
6454 static tree
6455 fold_single_bit_test_into_sign_test (location_t loc,
6456 enum tree_code code, tree arg0, tree arg1,
6457 tree result_type)
6459 /* If this is testing a single bit, we can optimize the test. */
6460 if ((code == NE_EXPR || code == EQ_EXPR)
6461 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6462 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6464 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6465 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6466 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6468 if (arg00 != NULL_TREE
6469 /* This is only a win if casting to a signed type is cheap,
6470 i.e. when arg00's type is not a partial mode. */
6471 && TYPE_PRECISION (TREE_TYPE (arg00))
6472 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6474 tree stype = signed_type_for (TREE_TYPE (arg00));
6475 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6476 result_type,
6477 fold_convert_loc (loc, stype, arg00),
6478 build_int_cst (stype, 0));
6482 return NULL_TREE;
6485 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6486 equality/inequality test, then return a simplified form of
6487 the test using shifts and logical operations. Otherwise return
6488 NULL. TYPE is the desired result type. */
6490 tree
6491 fold_single_bit_test (location_t loc, enum tree_code code,
6492 tree arg0, tree arg1, tree result_type)
6494 /* If this is testing a single bit, we can optimize the test. */
6495 if ((code == NE_EXPR || code == EQ_EXPR)
6496 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6497 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6499 tree inner = TREE_OPERAND (arg0, 0);
6500 tree type = TREE_TYPE (arg0);
6501 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6502 enum machine_mode operand_mode = TYPE_MODE (type);
6503 int ops_unsigned;
6504 tree signed_type, unsigned_type, intermediate_type;
6505 tree tem, one;
6507 /* First, see if we can fold the single bit test into a sign-bit
6508 test. */
6509 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6510 result_type);
6511 if (tem)
6512 return tem;
6514 /* Otherwise we have (A & C) != 0 where C is a single bit,
6515 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6516 Similarly for (A & C) == 0. */
6518 /* If INNER is a right shift by a constant and it plus BITNUM does
6519 not overflow, adjust BITNUM and INNER. */
6520 if (TREE_CODE (inner) == RSHIFT_EXPR
6521 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6522 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6523 && bitnum < TYPE_PRECISION (type)
6524 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6525 bitnum - TYPE_PRECISION (type)))
6527 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6528 inner = TREE_OPERAND (inner, 0);
6531 /* If we are going to be able to omit the AND below, we must do our
6532 operations as unsigned. If we must use the AND, we have a choice.
6533 Normally unsigned is faster, but for some machines signed is. */
6534 #ifdef LOAD_EXTEND_OP
6535 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6536 && !flag_syntax_only) ? 0 : 1;
6537 #else
6538 ops_unsigned = 1;
6539 #endif
6541 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6542 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6543 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6544 inner = fold_convert_loc (loc, intermediate_type, inner);
6546 if (bitnum != 0)
6547 inner = build2 (RSHIFT_EXPR, intermediate_type,
6548 inner, size_int (bitnum));
6550 one = build_int_cst (intermediate_type, 1);
6552 if (code == EQ_EXPR)
6553 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6555 /* Put the AND last so it can combine with more things. */
6556 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6558 /* Make sure to return the proper type. */
6559 inner = fold_convert_loc (loc, result_type, inner);
6561 return inner;
6563 return NULL_TREE;
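/* For example, "(A & 8) != 0" becomes "(A >> 3) & 1" via this path,
   and "(A & 8) == 0" additionally XORs the shifted bit with 1 before
   the final AND.  */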
6566 /* Check whether we are allowed to reorder operands arg0 and arg1,
6567 such that the evaluation of arg1 occurs before arg0. */
6569 static bool
6570 reorder_operands_p (const_tree arg0, const_tree arg1)
6572 if (! flag_evaluation_order)
6573 return true;
6574 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6575 return true;
6576 return ! TREE_SIDE_EFFECTS (arg0)
6577 && ! TREE_SIDE_EFFECTS (arg1);
6580 /* Test whether it is preferable to swap two operands, ARG0 and
6581 ARG1, for example because ARG0 is an integer constant and ARG1
6582 isn't. If REORDER is true, only recommend swapping if we can
6583 evaluate the operands in reverse order. */
6585 bool
6586 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6588 STRIP_SIGN_NOPS (arg0);
6589 STRIP_SIGN_NOPS (arg1);
6591 if (TREE_CODE (arg1) == INTEGER_CST)
6592 return 0;
6593 if (TREE_CODE (arg0) == INTEGER_CST)
6594 return 1;
6596 if (TREE_CODE (arg1) == REAL_CST)
6597 return 0;
6598 if (TREE_CODE (arg0) == REAL_CST)
6599 return 1;
6601 if (TREE_CODE (arg1) == FIXED_CST)
6602 return 0;
6603 if (TREE_CODE (arg0) == FIXED_CST)
6604 return 1;
6606 if (TREE_CODE (arg1) == COMPLEX_CST)
6607 return 0;
6608 if (TREE_CODE (arg0) == COMPLEX_CST)
6609 return 1;
6611 if (TREE_CONSTANT (arg1))
6612 return 0;
6613 if (TREE_CONSTANT (arg0))
6614 return 1;
6616 if (optimize_function_for_size_p (cfun))
6617 return 0;
6619 if (reorder && flag_evaluation_order
6620 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6621 return 0;
6623 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6624 for commutative and comparison operators. Ensuring a canonical
6625 form allows the optimizers to find additional redundancies without
6626 having to explicitly check for both orderings. */
6627 if (TREE_CODE (arg0) == SSA_NAME
6628 && TREE_CODE (arg1) == SSA_NAME
6629 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6630 return 1;
6632 /* Put SSA_NAMEs last. */
6633 if (TREE_CODE (arg1) == SSA_NAME)
6634 return 0;
6635 if (TREE_CODE (arg0) == SSA_NAME)
6636 return 1;
6638 /* Put variables last. */
6639 if (DECL_P (arg1))
6640 return 0;
6641 if (DECL_P (arg0))
6642 return 1;
6644 return 0;
6647 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6648 ARG0 is extended to a wider type. */
6650 static tree
6651 fold_widened_comparison (location_t loc, enum tree_code code,
6652 tree type, tree arg0, tree arg1)
6654 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6655 tree arg1_unw;
6656 tree shorter_type, outer_type;
6657 tree min, max;
6658 bool above, below;
6660 if (arg0_unw == arg0)
6661 return NULL_TREE;
6662 shorter_type = TREE_TYPE (arg0_unw);
6664 #ifdef HAVE_canonicalize_funcptr_for_compare
6665 /* Disable this optimization if we're casting a function pointer
6666 type on targets that require function pointer canonicalization. */
6667 if (HAVE_canonicalize_funcptr_for_compare
6668 && TREE_CODE (shorter_type) == POINTER_TYPE
6669 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6670 return NULL_TREE;
6671 #endif
6673 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6674 return NULL_TREE;
6676 arg1_unw = get_unwidened (arg1, NULL_TREE);
6678 /* If possible, express the comparison in the shorter mode. */
6679 if ((code == EQ_EXPR || code == NE_EXPR
6680 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6681 && (TREE_TYPE (arg1_unw) == shorter_type
6682 || ((TYPE_PRECISION (shorter_type)
6683 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6684 && (TYPE_UNSIGNED (shorter_type)
6685 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6686 || (TREE_CODE (arg1_unw) == INTEGER_CST
6687 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6688 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6689 && int_fits_type_p (arg1_unw, shorter_type))))
6690 return fold_build2_loc (loc, code, type, arg0_unw,
6691 fold_convert_loc (loc, shorter_type, arg1_unw));
6693 if (TREE_CODE (arg1_unw) != INTEGER_CST
6694 || TREE_CODE (shorter_type) != INTEGER_TYPE
6695 || !int_fits_type_p (arg1_unw, shorter_type))
6696 return NULL_TREE;
6698 /* If we are comparing with an integer that does not fit into the range
6699 of the shorter type, the result is known. */
6700 outer_type = TREE_TYPE (arg1_unw);
6701 min = lower_bound_in_type (outer_type, shorter_type);
6702 max = upper_bound_in_type (outer_type, shorter_type);
6704 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6705 max, arg1_unw));
6706 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6707 arg1_unw, min));
6709 switch (code)
6711 case EQ_EXPR:
6712 if (above || below)
6713 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6714 break;
6716 case NE_EXPR:
6717 if (above || below)
6718 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6719 break;
6721 case LT_EXPR:
6722 case LE_EXPR:
6723 if (above)
6724 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6725 else if (below)
6726 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6728 case GT_EXPR:
6729 case GE_EXPR:
6730 if (above)
6731 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6732 else if (below)
6733 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6735 default:
6736 break;
6739 return NULL_TREE;
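/* For instance, with a 16-bit short S widened for the comparison
   "(int) S < 100000", the constant lies above the range of short, so
   the result folds to true regardless of S.  */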
6742 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6743 ARG0 just the signedness is changed. */
6745 static tree
6746 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6747 tree arg0, tree arg1)
6749 tree arg0_inner;
6750 tree inner_type, outer_type;
6752 if (!CONVERT_EXPR_P (arg0))
6753 return NULL_TREE;
6755 outer_type = TREE_TYPE (arg0);
6756 arg0_inner = TREE_OPERAND (arg0, 0);
6757 inner_type = TREE_TYPE (arg0_inner);
6759 #ifdef HAVE_canonicalize_funcptr_for_compare
6760 /* Disable this optimization if we're casting a function pointer
6761 type on targets that require function pointer canonicalization. */
6762 if (HAVE_canonicalize_funcptr_for_compare
6763 && TREE_CODE (inner_type) == POINTER_TYPE
6764 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6765 return NULL_TREE;
6766 #endif
6768 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6769 return NULL_TREE;
6771 if (TREE_CODE (arg1) != INTEGER_CST
6772 && !(CONVERT_EXPR_P (arg1)
6773 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6774 return NULL_TREE;
6776 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6777 && code != NE_EXPR
6778 && code != EQ_EXPR)
6779 return NULL_TREE;
6781 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6782 return NULL_TREE;
6784 if (TREE_CODE (arg1) == INTEGER_CST)
6785 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6786 0, TREE_OVERFLOW (arg1));
6787 else
6788 arg1 = fold_convert_loc (loc, inner_type, arg1);
6790 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
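/* E.g. for int I, "(unsigned int) I == 5" is rewritten as "I == 5":
   the precisions match, and equality is insensitive to the
   signedness change.  */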
6793 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6794 the step of the array. Reconstructs s and delta in the case of s *
6795 delta being an integer constant (and thus already folded). ADDR is
6796 the address. OP1 is the multiplicative expression. If the
6797 function succeeds, the new address expression is returned.
6798 Otherwise NULL_TREE is returned. LOC is the location of the
6799 resulting expression. */
6801 static tree
6802 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6804 tree s, delta, step;
6805 tree ref = TREE_OPERAND (addr, 0), pref;
6806 tree ret, pos;
6807 tree itype;
6808 bool mdim = false;
6810 /* Strip the nops that might be added when converting op1 to sizetype. */
6811 STRIP_NOPS (op1);
6813 /* Canonicalize op1 into a possibly non-constant delta
6814 and an INTEGER_CST s. */
6815 if (TREE_CODE (op1) == MULT_EXPR)
6817 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6819 STRIP_NOPS (arg0);
6820 STRIP_NOPS (arg1);
6822 if (TREE_CODE (arg0) == INTEGER_CST)
6824 s = arg0;
6825 delta = arg1;
6827 else if (TREE_CODE (arg1) == INTEGER_CST)
6829 s = arg1;
6830 delta = arg0;
6832 else
6833 return NULL_TREE;
6835 else if (TREE_CODE (op1) == INTEGER_CST)
6837 delta = op1;
6838 s = NULL_TREE;
6840 else
6842 /* Treat op1 as delta * 1. */
6843 delta = op1;
6844 s = integer_one_node;
6847 /* Handle &x.array the same as we would handle &x.array[0]. */
6848 if (TREE_CODE (ref) == COMPONENT_REF
6849 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6851 tree domain;
6853 /* Remember if this was a multi-dimensional array. */
6854 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6855 mdim = true;
6857 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6858 if (! domain)
6859 goto cont;
6860 itype = TREE_TYPE (domain);
6862 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6863 if (TREE_CODE (step) != INTEGER_CST)
6864 goto cont;
6866 if (s)
6868 if (! tree_int_cst_equal (step, s))
6869 goto cont;
6871 else
6873 /* See whether delta is a multiple of the step. */
6874 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6875 if (! tmp)
6876 goto cont;
6877 delta = tmp;
6880 /* Only fold here if we can verify we do not overflow one
6881 dimension of a multi-dimensional array. */
6882 if (mdim)
6884 tree tmp;
6886 if (!TYPE_MIN_VALUE (domain)
6887 || !TYPE_MAX_VALUE (domain)
6888 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6889 goto cont;
6891 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6892 fold_convert_loc (loc, itype,
6893 TYPE_MIN_VALUE (domain)),
6894 fold_convert_loc (loc, itype, delta));
6895 if (TREE_CODE (tmp) != INTEGER_CST
6896 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6897 goto cont;
6900 /* We found a suitable component reference. */
6902 pref = TREE_OPERAND (addr, 0);
6903 ret = copy_node (pref);
6904 SET_EXPR_LOCATION (ret, loc);
6906 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6907 fold_build2_loc
6908 (loc, PLUS_EXPR, itype,
6909 fold_convert_loc (loc, itype,
6910 TYPE_MIN_VALUE
6911 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6912 fold_convert_loc (loc, itype, delta)),
6913 NULL_TREE, NULL_TREE);
6914 return build_fold_addr_expr_loc (loc, ret);
6917 cont:
6919 for (;; ref = TREE_OPERAND (ref, 0))
6921 if (TREE_CODE (ref) == ARRAY_REF)
6923 tree domain;
6925 /* Remember if this was a multi-dimensional array. */
6926 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6927 mdim = true;
6929 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6930 if (! domain)
6931 continue;
6932 itype = TREE_TYPE (domain);
6934 step = array_ref_element_size (ref);
6935 if (TREE_CODE (step) != INTEGER_CST)
6936 continue;
6938 if (s)
6940 if (! tree_int_cst_equal (step, s))
6941 continue;
6943 else
6945 /* See whether delta is a multiple of the step. */
6946 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6947 if (! tmp)
6948 continue;
6949 delta = tmp;
6952 /* Only fold here if we can verify we do not overflow one
6953 dimension of a multi-dimensional array. */
6954 if (mdim)
6956 tree tmp;
6958 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6959 || !TYPE_MAX_VALUE (domain)
6960 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6961 continue;
6963 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6964 fold_convert_loc (loc, itype,
6965 TREE_OPERAND (ref, 1)),
6966 fold_convert_loc (loc, itype, delta));
6967 if (!tmp
6968 || TREE_CODE (tmp) != INTEGER_CST
6969 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6970 continue;
6973 break;
6975 else
6976 mdim = false;
6978 if (!handled_component_p (ref))
6979 return NULL_TREE;
6982 /* We found a suitable array reference. So copy everything up to it,
6983 and replace the index. */
6985 pref = TREE_OPERAND (addr, 0);
6986 ret = copy_node (pref);
6987 SET_EXPR_LOCATION (ret, loc);
6988 pos = ret;
6990 while (pref != ref)
6992 pref = TREE_OPERAND (pref, 0);
6993 TREE_OPERAND (pos, 0) = copy_node (pref);
6994 pos = TREE_OPERAND (pos, 0);
6997 TREE_OPERAND (pos, 1)
6998 = fold_build2_loc (loc, PLUS_EXPR, itype,
6999 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7000 fold_convert_loc (loc, itype, delta));
7001 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
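/* As an example, for "int a[10]" (element size 4) the address
   computation &a[2] p+ 12 has step 4, so delta becomes 12 / 4 = 3 and
   the result is &a[2 + 3], i.e. &a[5].  */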
7005 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7006 means A >= Y && A != MAX, but in this case we know that
7007 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7009 static tree
7010 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7012 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7014 if (TREE_CODE (bound) == LT_EXPR)
7015 a = TREE_OPERAND (bound, 0);
7016 else if (TREE_CODE (bound) == GT_EXPR)
7017 a = TREE_OPERAND (bound, 1);
7018 else
7019 return NULL_TREE;
7021 typea = TREE_TYPE (a);
7022 if (!INTEGRAL_TYPE_P (typea)
7023 && !POINTER_TYPE_P (typea))
7024 return NULL_TREE;
7026 if (TREE_CODE (ineq) == LT_EXPR)
7028 a1 = TREE_OPERAND (ineq, 1);
7029 y = TREE_OPERAND (ineq, 0);
7031 else if (TREE_CODE (ineq) == GT_EXPR)
7033 a1 = TREE_OPERAND (ineq, 0);
7034 y = TREE_OPERAND (ineq, 1);
7036 else
7037 return NULL_TREE;
7039 if (TREE_TYPE (a1) != typea)
7040 return NULL_TREE;
7042 if (POINTER_TYPE_P (typea))
7044 /* Convert the pointer types into integer before taking the difference. */
7045 tree ta = fold_convert_loc (loc, ssizetype, a);
7046 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7047 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7049 else
7050 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7052 if (!diff || !integer_onep (diff))
7053 return NULL_TREE;
7055 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7058 /* Fold a sum or difference of at least one multiplication.
7059 Returns the folded tree or NULL if no simplification could be made. */
7061 static tree
7062 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7063 tree arg0, tree arg1)
7065 tree arg00, arg01, arg10, arg11;
7066 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7068 /* (A * C) +- (B * C) -> (A+-B) * C.
7069 (A * C) +- A -> A * (C+-1).
7070 We are most concerned about the case where C is a constant,
7071 but other combinations show up during loop reduction. Since
7072 it is not difficult, try all four possibilities. */
7074 if (TREE_CODE (arg0) == MULT_EXPR)
7076 arg00 = TREE_OPERAND (arg0, 0);
7077 arg01 = TREE_OPERAND (arg0, 1);
7079 else if (TREE_CODE (arg0) == INTEGER_CST)
7081 arg00 = build_one_cst (type);
7082 arg01 = arg0;
7084 else
7086 /* We cannot generate constant 1 for fract. */
7087 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7088 return NULL_TREE;
7089 arg00 = arg0;
7090 arg01 = build_one_cst (type);
7092 if (TREE_CODE (arg1) == MULT_EXPR)
7094 arg10 = TREE_OPERAND (arg1, 0);
7095 arg11 = TREE_OPERAND (arg1, 1);
7097 else if (TREE_CODE (arg1) == INTEGER_CST)
7099 arg10 = build_one_cst (type);
7100 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7101 the purpose of this canonicalization. */
7102 if (TREE_INT_CST_HIGH (arg1) == -1
7103 && negate_expr_p (arg1)
7104 && code == PLUS_EXPR)
7106 arg11 = negate_expr (arg1);
7107 code = MINUS_EXPR;
7109 else
7110 arg11 = arg1;
7112 else
7114 /* We cannot generate constant 1 for fract. */
7115 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7116 return NULL_TREE;
7117 arg10 = arg1;
7118 arg11 = build_one_cst (type);
7120 same = NULL_TREE;
7122 if (operand_equal_p (arg01, arg11, 0))
7123 same = arg01, alt0 = arg00, alt1 = arg10;
7124 else if (operand_equal_p (arg00, arg10, 0))
7125 same = arg00, alt0 = arg01, alt1 = arg11;
7126 else if (operand_equal_p (arg00, arg11, 0))
7127 same = arg00, alt0 = arg01, alt1 = arg10;
7128 else if (operand_equal_p (arg01, arg10, 0))
7129 same = arg01, alt0 = arg00, alt1 = arg11;
7131 /* No identical multiplicands; see if we can find a common
7132 power-of-two factor in non-power-of-two multiplies. This
7133 can help in multi-dimensional array access. */
7134 else if (host_integerp (arg01, 0)
7135 && host_integerp (arg11, 0))
7137 HOST_WIDE_INT int01, int11, tmp;
7138 bool swap = false;
7139 tree maybe_same;
7140 int01 = TREE_INT_CST_LOW (arg01);
7141 int11 = TREE_INT_CST_LOW (arg11);
7143 /* Move min of absolute values to int11. */
7144 if (absu_hwi (int01) < absu_hwi (int11))
7146 tmp = int01, int01 = int11, int11 = tmp;
7147 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7148 maybe_same = arg01;
7149 swap = true;
7151 else
7152 maybe_same = arg11;
7154 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7155 /* The remainder should not be a constant, otherwise we
7156 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7157 increase the number of multiplications necessary. */
7158 && TREE_CODE (arg10) != INTEGER_CST)
7160 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7161 build_int_cst (TREE_TYPE (arg00),
7162 int01 / int11));
7163 alt1 = arg10;
7164 same = maybe_same;
7165 if (swap)
7166 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7170 if (same)
7171 return fold_build2_loc (loc, MULT_EXPR, type,
7172 fold_build2_loc (loc, code, type,
7173 fold_convert_loc (loc, type, alt0),
7174 fold_convert_loc (loc, type, alt1)),
7175 fold_convert_loc (loc, type, same));
7177 return NULL_TREE;
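/* The power-of-two case above turns, e.g., I * 12 + J * 4 into
   (I * 3 + J) * 4: 4 is the smaller absolute constant, it is a power
   of two, and 12 is divisible by it.  */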
7180 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7181 specified by EXPR into the buffer PTR of length LEN bytes.
7182 Return the number of bytes placed in the buffer, or zero
7183 upon failure. */
7185 static int
7186 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7188 tree type = TREE_TYPE (expr);
7189 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7190 int byte, offset, word, words;
7191 unsigned char value;
7193 if (total_bytes > len)
7194 return 0;
7195 words = total_bytes / UNITS_PER_WORD;
7197 for (byte = 0; byte < total_bytes; byte++)
7199 int bitpos = byte * BITS_PER_UNIT;
7200 if (bitpos < HOST_BITS_PER_WIDE_INT)
7201 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7202 else
7203 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7204 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7206 if (total_bytes > UNITS_PER_WORD)
7208 word = byte / UNITS_PER_WORD;
7209 if (WORDS_BIG_ENDIAN)
7210 word = (words - 1) - word;
7211 offset = word * UNITS_PER_WORD;
7212 if (BYTES_BIG_ENDIAN)
7213 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7214 else
7215 offset += byte % UNITS_PER_WORD;
7217 else
7218 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7219 ptr[offset] = value;
7221 return total_bytes;
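/* For example, encoding the 32-bit constant 0x01020304 on a typical
   little-endian target stores the bytes 04 03 02 01, while a
   big-endian target stores 01 02 03 04 (word-swapping on sub-word
   hosts aside).  */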
7225 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7226 specified by EXPR into the buffer PTR of length LEN bytes.
7227 Return the number of bytes placed in the buffer, or zero
7228 upon failure. */
7230 static int
7231 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7233 tree type = TREE_TYPE (expr);
7234 enum machine_mode mode = TYPE_MODE (type);
7235 int total_bytes = GET_MODE_SIZE (mode);
7236 FIXED_VALUE_TYPE value;
7237 tree i_value, i_type;
7239 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7240 return 0;
7242 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7244 if (NULL_TREE == i_type
7245 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7246 return 0;
7248 value = TREE_FIXED_CST (expr);
7249 i_value = double_int_to_tree (i_type, value.data);
7251 return native_encode_int (i_value, ptr, len);
7255 /* Subroutine of native_encode_expr. Encode the REAL_CST
7256 specified by EXPR into the buffer PTR of length LEN bytes.
7257 Return the number of bytes placed in the buffer, or zero
7258 upon failure. */
7260 static int
7261 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7263 tree type = TREE_TYPE (expr);
7264 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7265 int byte, offset, word, words, bitpos;
7266 unsigned char value;
7268 /* There are always 32 bits in each element of TMP, no matter the
7269 size of the host's long. We handle floating-point representations
7270 with up to 192 bits. */
7271 long tmp[6];
7273 if (total_bytes > len)
7274 return 0;
7275 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7277 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7279 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7280 bitpos += BITS_PER_UNIT)
7282 byte = (bitpos / BITS_PER_UNIT) & 3;
7283 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7285 if (UNITS_PER_WORD < 4)
7287 word = byte / UNITS_PER_WORD;
7288 if (WORDS_BIG_ENDIAN)
7289 word = (words - 1) - word;
7290 offset = word * UNITS_PER_WORD;
7291 if (BYTES_BIG_ENDIAN)
7292 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7293 else
7294 offset += byte % UNITS_PER_WORD;
7296 else
7297 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7298 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7300 return total_bytes;
7303 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7304 specified by EXPR into the buffer PTR of length LEN bytes.
7305 Return the number of bytes placed in the buffer, or zero
7306 upon failure. */
7308 static int
7309 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7311 int rsize, isize;
7312 tree part;
7314 part = TREE_REALPART (expr);
7315 rsize = native_encode_expr (part, ptr, len);
7316 if (rsize == 0)
7317 return 0;
7318 part = TREE_IMAGPART (expr);
7319 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7320 if (isize != rsize)
7321 return 0;
7322 return rsize + isize;
7326 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7327 specified by EXPR into the buffer PTR of length LEN bytes.
7328 Return the number of bytes placed in the buffer, or zero
7329 upon failure. */
7331 static int
7332 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7334 unsigned i, count;
7335 int size, offset;
7336 tree itype, elem;
7338 offset = 0;
7339 count = VECTOR_CST_NELTS (expr);
7340 itype = TREE_TYPE (TREE_TYPE (expr));
7341 size = GET_MODE_SIZE (TYPE_MODE (itype));
7342 for (i = 0; i < count; i++)
7344 elem = VECTOR_CST_ELT (expr, i);
7345 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7346 return 0;
7347 offset += size;
7349 return offset;
7353 /* Subroutine of native_encode_expr. Encode the STRING_CST
7354 specified by EXPR into the buffer PTR of length LEN bytes.
7355 Return the number of bytes placed in the buffer, or zero
7356 upon failure. */
7358 static int
7359 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7361 tree type = TREE_TYPE (expr);
7362 HOST_WIDE_INT total_bytes;
7364 if (TREE_CODE (type) != ARRAY_TYPE
7365 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7366 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7367 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7368 return 0;
7369 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7370 if (total_bytes > len)
7371 return 0;
7372 if (TREE_STRING_LENGTH (expr) < total_bytes)
7374 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7375 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7376 total_bytes - TREE_STRING_LENGTH (expr));
7378 else
7379 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7380 return total_bytes;
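/* Example (illustrative, not from the original source): encoding the
   STRING_CST "ab" -- three bytes counting the terminating NUL -- with
   array type "char[4]" copies those three bytes and zero-fills the
   rest, leaving { 'a', 'b', 0, 0 } in the buffer.  */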
7384 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7385 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST
7386 specified by EXPR into the buffer PTR of length LEN bytes. Return
7387 the number of bytes placed in the buffer, or zero upon failure. */
7389 int
7390 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7392 switch (TREE_CODE (expr))
7394 case INTEGER_CST:
7395 return native_encode_int (expr, ptr, len);
7397 case REAL_CST:
7398 return native_encode_real (expr, ptr, len);
7400 case FIXED_CST:
7401 return native_encode_fixed (expr, ptr, len);
7403 case COMPLEX_CST:
7404 return native_encode_complex (expr, ptr, len);
7406 case VECTOR_CST:
7407 return native_encode_vector (expr, ptr, len);
7409 case STRING_CST:
7410 return native_encode_string (expr, ptr, len);
7412 default:
7413 return 0;
7418 /* Subroutine of native_interpret_expr. Interpret the contents of
7419 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7420 If the buffer cannot be interpreted, return NULL_TREE. */
7422 static tree
7423 native_interpret_int (tree type, const unsigned char *ptr, int len)
7425 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7426 double_int result;
7428 if (total_bytes > len
7429 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7430 return NULL_TREE;
7432 result = double_int::from_buffer (ptr, total_bytes);
7434 return double_int_to_tree (type, result);
7438 /* Subroutine of native_interpret_expr. Interpret the contents of
7439 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7440 If the buffer cannot be interpreted, return NULL_TREE. */
7442 static tree
7443 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7445 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7446 double_int result;
7447 FIXED_VALUE_TYPE fixed_value;
7449 if (total_bytes > len
7450 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7451 return NULL_TREE;
7453 result = double_int::from_buffer (ptr, total_bytes);
7454 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7456 return build_fixed (type, fixed_value);
7460 /* Subroutine of native_interpret_expr. Interpret the contents of
7461 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7462 If the buffer cannot be interpreted, return NULL_TREE. */
7464 static tree
7465 native_interpret_real (tree type, const unsigned char *ptr, int len)
7467 enum machine_mode mode = TYPE_MODE (type);
7468 int total_bytes = GET_MODE_SIZE (mode);
7469 int byte, offset, word, words, bitpos;
7470 unsigned char value;
7471 /* There are always 32 bits in each element of TMP, no matter the
7472 size of the host's long. We handle floating-point representations
7473 with up to 192 bits. */
7474 REAL_VALUE_TYPE r;
7475 long tmp[6];
7478 if (total_bytes > len || total_bytes > 24)
7479 return NULL_TREE;
7480 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7482 memset (tmp, 0, sizeof (tmp));
7483 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7484 bitpos += BITS_PER_UNIT)
7486 byte = (bitpos / BITS_PER_UNIT) & 3;
7487 if (UNITS_PER_WORD < 4)
7489 word = byte / UNITS_PER_WORD;
7490 if (WORDS_BIG_ENDIAN)
7491 word = (words - 1) - word;
7492 offset = word * UNITS_PER_WORD;
7493 if (BYTES_BIG_ENDIAN)
7494 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7495 else
7496 offset += byte % UNITS_PER_WORD;
7498 else
7499 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7500 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7502 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7505 real_from_target (&r, tmp, mode);
7506 return build_real (type, r);
7510 /* Subroutine of native_interpret_expr. Interpret the contents of
7511 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7512 If the buffer cannot be interpreted, return NULL_TREE. */
7514 static tree
7515 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7517 tree etype, rpart, ipart;
7518 int size;
7520 etype = TREE_TYPE (type);
7521 size = GET_MODE_SIZE (TYPE_MODE (etype));
7522 if (size * 2 > len)
7523 return NULL_TREE;
7524 rpart = native_interpret_expr (etype, ptr, size);
7525 if (!rpart)
7526 return NULL_TREE;
7527 ipart = native_interpret_expr (etype, ptr+size, size);
7528 if (!ipart)
7529 return NULL_TREE;
7530 return build_complex (type, rpart, ipart);
7534 /* Subroutine of native_interpret_expr. Interpret the contents of
7535 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7536 If the buffer cannot be interpreted, return NULL_TREE. */
7538 static tree
7539 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7541 tree etype, elem;
7542 int i, size, count;
7543 tree *elements;
7545 etype = TREE_TYPE (type);
7546 size = GET_MODE_SIZE (TYPE_MODE (etype));
7547 count = TYPE_VECTOR_SUBPARTS (type);
7548 if (size * count > len)
7549 return NULL_TREE;
7551 elements = XALLOCAVEC (tree, count);
7552 for (i = count - 1; i >= 0; i--)
7554 elem = native_interpret_expr (etype, ptr+(i*size), size);
7555 if (!elem)
7556 return NULL_TREE;
7557 elements[i] = elem;
7559 return build_vector (type, elements);
7563 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7564 the buffer PTR of length LEN as a constant of type TYPE. For
7565 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7566 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7567 return NULL_TREE. */
7569 tree
7570 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7572 switch (TREE_CODE (type))
7574 case INTEGER_TYPE:
7575 case ENUMERAL_TYPE:
7576 case BOOLEAN_TYPE:
7577 case POINTER_TYPE:
7578 case REFERENCE_TYPE:
7579 return native_interpret_int (type, ptr, len);
7581 case REAL_TYPE:
7582 return native_interpret_real (type, ptr, len);
7584 case FIXED_POINT_TYPE:
7585 return native_interpret_fixed (type, ptr, len);
7587 case COMPLEX_TYPE:
7588 return native_interpret_complex (type, ptr, len);
7590 case VECTOR_TYPE:
7591 return native_interpret_vector (type, ptr, len);
7593 default:
7594 return NULL_TREE;
7598 /* Returns true if we can interpret the contents of a native encoding
7599 as TYPE. */
7601 static bool
7602 can_native_interpret_type_p (tree type)
7604 switch (TREE_CODE (type))
7606 case INTEGER_TYPE:
7607 case ENUMERAL_TYPE:
7608 case BOOLEAN_TYPE:
7609 case POINTER_TYPE:
7610 case REFERENCE_TYPE:
7611 case FIXED_POINT_TYPE:
7612 case REAL_TYPE:
7613 case COMPLEX_TYPE:
7614 case VECTOR_TYPE:
7615 return true;
7616 default:
7617 return false;
7621 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7622 TYPE at compile-time. If we're unable to perform the conversion
7623 return NULL_TREE. */
7625 static tree
7626 fold_view_convert_expr (tree type, tree expr)
7628 /* We support up to 512-bit values (for V8DFmode). */
7629 unsigned char buffer[64];
7630 int len;
7632 /* Check that the host and target are sane. */
7633 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7634 return NULL_TREE;
7636 len = native_encode_expr (expr, buffer, sizeof (buffer));
7637 if (len == 0)
7638 return NULL_TREE;
7640 return native_interpret_expr (type, buffer, len);
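/* A minimal round-trip sketch (hypothetical helper, not called anywhere
   in this file), assuming the target's int is 32 bits: it shows how the
   encode and interpret halves combine into a constant-folded bit cast,
   which is exactly what fold_view_convert_expr does above.  */

static tree
example_bits_of_float (tree float_cst)
{
  /* Reinterpret the bytes of FLOAT_CST as an integer constant, or
     return NULL_TREE when the value cannot be encoded.  */
  unsigned char buf[16];
  int len = native_encode_expr (float_cst, buf, sizeof (buf));
  if (len == 0)
    return NULL_TREE;
  return native_interpret_expr (integer_type_node, buf, len);
}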
7643 /* Build an expression for the address of T. Folds away INDIRECT_REF
7644 to avoid confusing the gimplify process. */
7646 tree
7647 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7649 /* The size of the object is not relevant when talking about its address. */
7650 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7651 t = TREE_OPERAND (t, 0);
7653 if (TREE_CODE (t) == INDIRECT_REF)
7655 t = TREE_OPERAND (t, 0);
7657 if (TREE_TYPE (t) != ptrtype)
7658 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7660 else if (TREE_CODE (t) == MEM_REF
7661 && integer_zerop (TREE_OPERAND (t, 1)))
7662 return TREE_OPERAND (t, 0);
7663 else if (TREE_CODE (t) == MEM_REF
7664 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7665 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7666 TREE_OPERAND (t, 0),
7667 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7668 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7670 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7672 if (TREE_TYPE (t) != ptrtype)
7673 t = fold_convert_loc (loc, ptrtype, t);
7675 else
7676 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7678 return t;
7681 /* Build an expression for the address of T. */
7683 tree
7684 build_fold_addr_expr_loc (location_t loc, tree t)
7686 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7688 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
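/* Usage sketch (hypothetical helper, not part of the original file):
   because the INDIRECT_REF case above folds away, taking the address
   of a dereference hands back the pointer itself.  */

static tree
example_addr_of_deref (location_t loc, tree p)
{
  /* For "int *p" this returns P, not a new ADDR_EXPR for &*P.  */
  tree deref = build_fold_indirect_ref_loc (loc, p);
  return build_fold_addr_expr_loc (loc, deref);
}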
7691 static bool vec_cst_ctor_to_array (tree, tree *);
7693 /* Fold a unary expression of code CODE and type TYPE with operand
7694 OP0. Return the folded expression if folding is successful.
7695 Otherwise, return NULL_TREE. */
7697 tree
7698 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7700 tree tem;
7701 tree arg0;
7702 enum tree_code_class kind = TREE_CODE_CLASS (code);
7704 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7705 && TREE_CODE_LENGTH (code) == 1);
7707 arg0 = op0;
7708 if (arg0)
7710 if (CONVERT_EXPR_CODE_P (code)
7711 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7713 /* Don't use STRIP_NOPS, because signedness of argument type
7714 matters. */
7715 STRIP_SIGN_NOPS (arg0);
7717 else
7719 /* Strip any conversions that don't change the mode. This
7720 is safe for every expression, except for a comparison
7721 expression because its signedness is derived from its
7722 operands.
7724 Note that this is done as an internal manipulation within
7725 the constant folder, in order to find the simplest
7726 representation of the arguments so that their form can be
7727 studied. In any case, the appropriate type conversions
7728 should be put back in the tree that will get out of the
7729 constant folder. */
7730 STRIP_NOPS (arg0);
7734 if (TREE_CODE_CLASS (code) == tcc_unary)
7736 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7737 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7738 fold_build1_loc (loc, code, type,
7739 fold_convert_loc (loc, TREE_TYPE (op0),
7740 TREE_OPERAND (arg0, 1))));
7741 else if (TREE_CODE (arg0) == COND_EXPR)
7743 tree arg01 = TREE_OPERAND (arg0, 1);
7744 tree arg02 = TREE_OPERAND (arg0, 2);
7745 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7746 arg01 = fold_build1_loc (loc, code, type,
7747 fold_convert_loc (loc,
7748 TREE_TYPE (op0), arg01));
7749 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7750 arg02 = fold_build1_loc (loc, code, type,
7751 fold_convert_loc (loc,
7752 TREE_TYPE (op0), arg02));
7753 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7754 arg01, arg02);
7756 /* If this was a conversion, and all we did was to move it
7757 inside the COND_EXPR, bring it back out. But leave it if
7758 it is a conversion from integer to integer and the
7759 result precision is no wider than a word since such a
7760 conversion is cheap and may be optimized away by combine,
7761 while it couldn't if it were outside the COND_EXPR. Then return
7762 so we don't get into an infinite recursion loop taking the
7763 conversion out and then back in. */
7765 if ((CONVERT_EXPR_CODE_P (code)
7766 || code == NON_LVALUE_EXPR)
7767 && TREE_CODE (tem) == COND_EXPR
7768 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7769 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7770 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7771 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7772 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7773 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7774 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7775 && (INTEGRAL_TYPE_P
7776 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7777 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7778 || flag_syntax_only))
7779 tem = build1_loc (loc, code, type,
7780 build3 (COND_EXPR,
7781 TREE_TYPE (TREE_OPERAND
7782 (TREE_OPERAND (tem, 1), 0)),
7783 TREE_OPERAND (tem, 0),
7784 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7785 TREE_OPERAND (TREE_OPERAND (tem, 2),
7786 0)));
7787 return tem;
7791 switch (code)
7793 case PAREN_EXPR:
7794 /* Re-association barriers around constants and other re-association
7795 barriers can be removed. */
7796 if (CONSTANT_CLASS_P (op0)
7797 || TREE_CODE (op0) == PAREN_EXPR)
7798 return fold_convert_loc (loc, type, op0);
7799 return NULL_TREE;
7801 CASE_CONVERT:
7802 case FLOAT_EXPR:
7803 case FIX_TRUNC_EXPR:
7804 if (TREE_TYPE (op0) == type)
7805 return op0;
7807 if (COMPARISON_CLASS_P (op0))
7809 /* If we have (type) (a CMP b) and type is an integral type, return
7810 new expression involving the new type. Canonicalize
7811 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7812 non-integral type.
7813 Do not fold the result as that would not simplify further, also
7814 folding again results in recursions. */
7815 if (TREE_CODE (type) == BOOLEAN_TYPE)
7816 return build2_loc (loc, TREE_CODE (op0), type,
7817 TREE_OPERAND (op0, 0),
7818 TREE_OPERAND (op0, 1));
7819 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7820 && TREE_CODE (type) != VECTOR_TYPE)
7821 return build3_loc (loc, COND_EXPR, type, op0,
7822 constant_boolean_node (true, type),
7823 constant_boolean_node (false, type));
7826 /* Handle cases of two conversions in a row. */
7827 if (CONVERT_EXPR_P (op0))
7829 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7830 tree inter_type = TREE_TYPE (op0);
7831 int inside_int = INTEGRAL_TYPE_P (inside_type);
7832 int inside_ptr = POINTER_TYPE_P (inside_type);
7833 int inside_float = FLOAT_TYPE_P (inside_type);
7834 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7835 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7836 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7837 int inter_int = INTEGRAL_TYPE_P (inter_type);
7838 int inter_ptr = POINTER_TYPE_P (inter_type);
7839 int inter_float = FLOAT_TYPE_P (inter_type);
7840 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7841 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7842 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7843 int final_int = INTEGRAL_TYPE_P (type);
7844 int final_ptr = POINTER_TYPE_P (type);
7845 int final_float = FLOAT_TYPE_P (type);
7846 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7847 unsigned int final_prec = TYPE_PRECISION (type);
7848 int final_unsignedp = TYPE_UNSIGNED (type);
7850 /* In addition to the cases of two conversions in a row
7851 handled below, if we are converting something to its own
7852 type via an object of identical or wider precision, neither
7853 conversion is needed. */
7854 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7855 && (((inter_int || inter_ptr) && final_int)
7856 || (inter_float && final_float))
7857 && inter_prec >= final_prec)
7858 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7860 /* Likewise, if the intermediate and initial types are either both
7861 float or both integer, we don't need the middle conversion if the
7862 former is wider than the latter and doesn't change the signedness
7863 (for integers). Avoid this if the final type is a pointer since
7864 then we sometimes need the middle conversion. Likewise if the
7865 final type has a precision not equal to the size of its mode. */
7866 if (((inter_int && inside_int)
7867 || (inter_float && inside_float)
7868 || (inter_vec && inside_vec))
7869 && inter_prec >= inside_prec
7870 && (inter_float || inter_vec
7871 || inter_unsignedp == inside_unsignedp)
7872 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7873 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7874 && ! final_ptr
7875 && (! final_vec || inter_prec == inside_prec))
7876 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7878 /* If we have a sign-extension of a zero-extended value, we can
7879 replace that by a single zero-extension. Likewise if the
7880 final conversion does not change precision we can drop the
7881 intermediate conversion. */
7882 if (inside_int && inter_int && final_int
7883 && ((inside_prec < inter_prec && inter_prec < final_prec
7884 && inside_unsignedp && !inter_unsignedp)
7885 || final_prec == inter_prec))
7886 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7888 /* Two conversions in a row are not needed unless:
7889 - some conversion is floating-point (overstrict for now), or
7890 - some conversion is a vector (overstrict for now), or
7891 - the intermediate type is narrower than both initial and
7892 final, or
7893 - the intermediate type and innermost type differ in signedness,
7894 and the outermost type is wider than the intermediate, or
7895 - the initial type is a pointer type and the precisions of the
7896 intermediate and final types differ, or
7897 - the final type is a pointer type and the precisions of the
7898 initial and intermediate types differ. */
7899 if (! inside_float && ! inter_float && ! final_float
7900 && ! inside_vec && ! inter_vec && ! final_vec
7901 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7902 && ! (inside_int && inter_int
7903 && inter_unsignedp != inside_unsignedp
7904 && inter_prec < final_prec)
7905 && ((inter_unsignedp && inter_prec > inside_prec)
7906 == (final_unsignedp && final_prec > inter_prec))
7907 && ! (inside_ptr && inter_prec != final_prec)
7908 && ! (final_ptr && inside_prec != inter_prec)
7909 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7910 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7911 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
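/* Worked examples (illustrative, not from the original source): for
   "char c", the double cast (char) (int) c collapses to plain "c",
   a conversion to its own type via a wider object; and for
   "unsigned char x", (int) (short) x drops the intermediate cast,
   since sign-extending a zero-extended value is the same as the
   single zero-extension (int) x.  */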
7914 /* Handle (T *)&A.B.C for A being of type T and B and C
7915 living at offset zero. This occurs frequently in
7916 C++ upcasting and then accessing the base. */
7917 if (TREE_CODE (op0) == ADDR_EXPR
7918 && POINTER_TYPE_P (type)
7919 && handled_component_p (TREE_OPERAND (op0, 0)))
7921 HOST_WIDE_INT bitsize, bitpos;
7922 tree offset;
7923 enum machine_mode mode;
7924 int unsignedp, volatilep;
7925 tree base = TREE_OPERAND (op0, 0);
7926 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7927 &mode, &unsignedp, &volatilep, false);
7928 /* If the reference was to a (constant) zero offset, we can use
7929 the address of the base if it has the same base type
7930 as the result type and the pointer type is unqualified. */
7931 if (! offset && bitpos == 0
7932 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7933 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7934 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7935 return fold_convert_loc (loc, type,
7936 build_fold_addr_expr_loc (loc, base));
7939 if (TREE_CODE (op0) == MODIFY_EXPR
7940 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7941 /* Detect assigning a bitfield. */
7942 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7943 && DECL_BIT_FIELD
7944 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7946 /* Don't leave an assignment inside a conversion
7947 unless assigning a bitfield. */
7948 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7949 /* First do the assignment, then return converted constant. */
7950 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7951 TREE_NO_WARNING (tem) = 1;
7952 TREE_USED (tem) = 1;
7953 return tem;
7956 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7957 constant (if x has signed type, the sign bit cannot be set
7958 in c). This folds extension into the BIT_AND_EXPR.
7959 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7960 very likely don't have maximal range for their precision and this
7961 transformation effectively doesn't preserve non-maximal ranges. */
7962 if (TREE_CODE (type) == INTEGER_TYPE
7963 && TREE_CODE (op0) == BIT_AND_EXPR
7964 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7966 tree and_expr = op0;
7967 tree and0 = TREE_OPERAND (and_expr, 0);
7968 tree and1 = TREE_OPERAND (and_expr, 1);
7969 int change = 0;
7971 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7972 || (TYPE_PRECISION (type)
7973 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7974 change = 1;
7975 else if (TYPE_PRECISION (TREE_TYPE (and1))
7976 <= HOST_BITS_PER_WIDE_INT
7977 && host_integerp (and1, 1))
7979 unsigned HOST_WIDE_INT cst;
7981 cst = tree_low_cst (and1, 1);
7982 cst &= (HOST_WIDE_INT) -1
7983 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7984 change = (cst == 0);
7985 #ifdef LOAD_EXTEND_OP
7986 if (change
7987 && !flag_syntax_only
7988 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7989 == ZERO_EXTEND))
7991 tree uns = unsigned_type_for (TREE_TYPE (and0));
7992 and0 = fold_convert_loc (loc, uns, and0);
7993 and1 = fold_convert_loc (loc, uns, and1);
7995 #endif
7997 if (change)
7999 tem = force_fit_type_double (type, tree_to_double_int (and1),
8000 0, TREE_OVERFLOW (and1));
8001 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8002 fold_convert_loc (loc, type, and0), tem);
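/* Worked example (illustrative, not from the original source): for
   "signed char x", (unsigned int) (x & 0x3f) becomes
   (unsigned int) x & 0x3fU: the constant 0x3f has no bits at or above
   the sign bit of the narrow type, so the extension can be folded
   into the BIT_AND_EXPR.  */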
8006 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8007 when one of the new casts will fold away. Conservatively we assume
8008 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8009 if (POINTER_TYPE_P (type)
8010 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8011 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8012 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8013 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8014 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8016 tree arg00 = TREE_OPERAND (arg0, 0);
8017 tree arg01 = TREE_OPERAND (arg0, 1);
8019 return fold_build_pointer_plus_loc
8020 (loc, fold_convert_loc (loc, type, arg00), arg01);
8023 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8024 of the same precision, and X has an integer type not narrower than
8025 T1 or T2, i.e. the cast (T2)X isn't an extension. */
8026 if (INTEGRAL_TYPE_P (type)
8027 && TREE_CODE (op0) == BIT_NOT_EXPR
8028 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8029 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8030 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8032 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8033 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8034 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8035 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8036 fold_convert_loc (loc, type, tem));
8039 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8040 type of X and Y (integer types only). */
8041 if (INTEGRAL_TYPE_P (type)
8042 && TREE_CODE (op0) == MULT_EXPR
8043 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8044 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8046 /* Be careful not to introduce new overflows. */
8047 tree mult_type;
8048 if (TYPE_OVERFLOW_WRAPS (type))
8049 mult_type = type;
8050 else
8051 mult_type = unsigned_type_for (type);
8053 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8055 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8056 fold_convert_loc (loc, mult_type,
8057 TREE_OPERAND (op0, 0)),
8058 fold_convert_loc (loc, mult_type,
8059 TREE_OPERAND (op0, 1)));
8060 return fold_convert_loc (loc, type, tem);
8064 tem = fold_convert_const (code, type, op0);
8065 return tem ? tem : NULL_TREE;
8067 case ADDR_SPACE_CONVERT_EXPR:
8068 if (integer_zerop (arg0))
8069 return fold_convert_const (code, type, arg0);
8070 return NULL_TREE;
8072 case FIXED_CONVERT_EXPR:
8073 tem = fold_convert_const (code, type, arg0);
8074 return tem ? tem : NULL_TREE;
8076 case VIEW_CONVERT_EXPR:
8077 if (TREE_TYPE (op0) == type)
8078 return op0;
8079 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8080 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8081 type, TREE_OPERAND (op0, 0));
8082 if (TREE_CODE (op0) == MEM_REF)
8083 return fold_build2_loc (loc, MEM_REF, type,
8084 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8086 /* For integral conversions with the same precision or pointer
8087 conversions use a NOP_EXPR instead. */
8088 if ((INTEGRAL_TYPE_P (type)
8089 || POINTER_TYPE_P (type))
8090 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8091 || POINTER_TYPE_P (TREE_TYPE (op0)))
8092 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8093 return fold_convert_loc (loc, type, op0);
8095 /* Strip inner integral conversions that do not change the precision. */
8096 if (CONVERT_EXPR_P (op0)
8097 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8098 || POINTER_TYPE_P (TREE_TYPE (op0)))
8099 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8100 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8101 && (TYPE_PRECISION (TREE_TYPE (op0))
8102 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8103 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8104 type, TREE_OPERAND (op0, 0));
8106 return fold_view_convert_expr (type, op0);
8108 case NEGATE_EXPR:
8109 tem = fold_negate_expr (loc, arg0);
8110 if (tem)
8111 return fold_convert_loc (loc, type, tem);
8112 return NULL_TREE;
8114 case ABS_EXPR:
8115 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8116 return fold_abs_const (arg0, type);
8117 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8118 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8119 /* Convert fabs((double)float) into (double)fabsf(float). */
8120 else if (TREE_CODE (arg0) == NOP_EXPR
8121 && TREE_CODE (type) == REAL_TYPE)
8123 tree targ0 = strip_float_extensions (arg0);
8124 if (targ0 != arg0)
8125 return fold_convert_loc (loc, type,
8126 fold_build1_loc (loc, ABS_EXPR,
8127 TREE_TYPE (targ0),
8128 targ0));
8130 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8131 else if (TREE_CODE (arg0) == ABS_EXPR)
8132 return arg0;
8133 else if (tree_expr_nonnegative_p (arg0))
8134 return arg0;
8136 /* Strip sign ops from argument. */
8137 if (TREE_CODE (type) == REAL_TYPE)
8139 tem = fold_strip_sign_ops (arg0);
8140 if (tem)
8141 return fold_build1_loc (loc, ABS_EXPR, type,
8142 fold_convert_loc (loc, type, tem));
8144 return NULL_TREE;
8146 case CONJ_EXPR:
8147 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8148 return fold_convert_loc (loc, type, arg0);
8149 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8151 tree itype = TREE_TYPE (type);
8152 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8153 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8154 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8155 negate_expr (ipart));
8157 if (TREE_CODE (arg0) == COMPLEX_CST)
8159 tree itype = TREE_TYPE (type);
8160 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8161 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8162 return build_complex (type, rpart, negate_expr (ipart));
8164 if (TREE_CODE (arg0) == CONJ_EXPR)
8165 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8166 return NULL_TREE;
8168 case BIT_NOT_EXPR:
8169 if (TREE_CODE (arg0) == INTEGER_CST)
8170 return fold_not_const (arg0, type);
8171 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8172 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8173 /* Convert ~ (-A) to A - 1. */
8174 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8175 return fold_build2_loc (loc, MINUS_EXPR, type,
8176 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8177 build_int_cst (type, 1));
8178 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8179 else if (INTEGRAL_TYPE_P (type)
8180 && ((TREE_CODE (arg0) == MINUS_EXPR
8181 && integer_onep (TREE_OPERAND (arg0, 1)))
8182 || (TREE_CODE (arg0) == PLUS_EXPR
8183 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8184 return fold_build1_loc (loc, NEGATE_EXPR, type,
8185 fold_convert_loc (loc, type,
8186 TREE_OPERAND (arg0, 0)));
8187 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8188 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8189 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8190 fold_convert_loc (loc, type,
8191 TREE_OPERAND (arg0, 0)))))
8192 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8193 fold_convert_loc (loc, type,
8194 TREE_OPERAND (arg0, 1)));
8195 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8196 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8197 fold_convert_loc (loc, type,
8198 TREE_OPERAND (arg0, 1)))))
8199 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8200 fold_convert_loc (loc, type,
8201 TREE_OPERAND (arg0, 0)), tem);
8202 /* Perform BIT_NOT_EXPR on each element individually. */
8203 else if (TREE_CODE (arg0) == VECTOR_CST)
8205 tree *elements;
8206 tree elem;
8207 unsigned count = VECTOR_CST_NELTS (arg0), i;
8209 elements = XALLOCAVEC (tree, count);
8210 for (i = 0; i < count; i++)
8212 elem = VECTOR_CST_ELT (arg0, i);
8213 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8214 if (elem == NULL_TREE)
8215 break;
8216 elements[i] = elem;
8218 if (i == count)
8219 return build_vector (type, elements);
8222 return NULL_TREE;
8224 case TRUTH_NOT_EXPR:
8225 /* The argument to invert_truthvalue must have Boolean type. */
8226 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8227 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8229 /* Note that the operand of this must be an int
8230 and its values must be 0 or 1.
8231 ("true" is a fixed value perhaps depending on the language,
8232 but we don't handle values other than 1 correctly yet.) */
8233 tem = fold_truth_not_expr (loc, arg0);
8234 if (!tem)
8235 return NULL_TREE;
8236 return fold_convert_loc (loc, type, tem);
8238 case REALPART_EXPR:
8239 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8240 return fold_convert_loc (loc, type, arg0);
8241 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8242 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8243 TREE_OPERAND (arg0, 1));
8244 if (TREE_CODE (arg0) == COMPLEX_CST)
8245 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8246 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8248 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8249 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8250 fold_build1_loc (loc, REALPART_EXPR, itype,
8251 TREE_OPERAND (arg0, 0)),
8252 fold_build1_loc (loc, REALPART_EXPR, itype,
8253 TREE_OPERAND (arg0, 1)));
8254 return fold_convert_loc (loc, type, tem);
8256 if (TREE_CODE (arg0) == CONJ_EXPR)
8258 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8259 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8260 TREE_OPERAND (arg0, 0));
8261 return fold_convert_loc (loc, type, tem);
8263 if (TREE_CODE (arg0) == CALL_EXPR)
8265 tree fn = get_callee_fndecl (arg0);
8266 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8267 switch (DECL_FUNCTION_CODE (fn))
8269 CASE_FLT_FN (BUILT_IN_CEXPI):
8270 fn = mathfn_built_in (type, BUILT_IN_COS);
8271 if (fn)
8272 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8273 break;
8275 default:
8276 break;
8279 return NULL_TREE;
8281 case IMAGPART_EXPR:
8282 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8283 return build_zero_cst (type);
8284 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8285 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8286 TREE_OPERAND (arg0, 0));
8287 if (TREE_CODE (arg0) == COMPLEX_CST)
8288 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8289 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8291 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8292 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8293 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8294 TREE_OPERAND (arg0, 0)),
8295 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8296 TREE_OPERAND (arg0, 1)));
8297 return fold_convert_loc (loc, type, tem);
8299 if (TREE_CODE (arg0) == CONJ_EXPR)
8301 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8302 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8303 return fold_convert_loc (loc, type, negate_expr (tem));
8305 if (TREE_CODE (arg0) == CALL_EXPR)
8307 tree fn = get_callee_fndecl (arg0);
8308 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8309 switch (DECL_FUNCTION_CODE (fn))
8311 CASE_FLT_FN (BUILT_IN_CEXPI):
8312 fn = mathfn_built_in (type, BUILT_IN_SIN);
8313 if (fn)
8314 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8315 break;
8317 default:
8318 break;
8321 return NULL_TREE;
8323 case INDIRECT_REF:
8324 /* Fold *&X to X if X is an lvalue. */
8325 if (TREE_CODE (op0) == ADDR_EXPR)
8327 tree op00 = TREE_OPERAND (op0, 0);
8328 if ((TREE_CODE (op00) == VAR_DECL
8329 || TREE_CODE (op00) == PARM_DECL
8330 || TREE_CODE (op00) == RESULT_DECL)
8331 && !TREE_READONLY (op00))
8332 return op00;
8334 return NULL_TREE;
8336 case VEC_UNPACK_LO_EXPR:
8337 case VEC_UNPACK_HI_EXPR:
8338 case VEC_UNPACK_FLOAT_LO_EXPR:
8339 case VEC_UNPACK_FLOAT_HI_EXPR:
8341 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8342 tree *elts;
8343 enum tree_code subcode;
8345 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8346 if (TREE_CODE (arg0) != VECTOR_CST)
8347 return NULL_TREE;
8349 elts = XALLOCAVEC (tree, nelts * 2);
8350 if (!vec_cst_ctor_to_array (arg0, elts))
8351 return NULL_TREE;
8353 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8354 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8355 elts += nelts;
8357 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8358 subcode = NOP_EXPR;
8359 else
8360 subcode = FLOAT_EXPR;
8362 for (i = 0; i < nelts; i++)
8364 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8365 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8366 return NULL_TREE;
8369 return build_vector (type, elts);
8372 case REDUC_MIN_EXPR:
8373 case REDUC_MAX_EXPR:
8374 case REDUC_PLUS_EXPR:
8376 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8377 tree *elts;
8378 enum tree_code subcode;
8380 if (TREE_CODE (op0) != VECTOR_CST)
8381 return NULL_TREE;
8383 elts = XALLOCAVEC (tree, nelts);
8384 if (!vec_cst_ctor_to_array (op0, elts))
8385 return NULL_TREE;
8387 switch (code)
8389 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8390 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8391 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8392 default: gcc_unreachable ();
8395 for (i = 1; i < nelts; i++)
8397 elts[0] = const_binop (subcode, elts[0], elts[i]);
8398 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8399 return NULL_TREE;
8400 elts[i] = build_zero_cst (TREE_TYPE (type));
8403 return build_vector (type, elts);
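/* Example (illustrative, not from the original source): REDUC_PLUS_EXPR
   on the constant vector { 1, 2, 3, 4 } accumulates into element 0 and
   zeroes the rest, producing { 10, 0, 0, 0 }.  */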
8406 default:
8407 return NULL_TREE;
8408 } /* switch (code) */
8412 /* If the operation was a conversion, do _not_ mark a resulting constant
8413 with TREE_OVERFLOW if the original constant was not. These conversions
8414 have implementation-defined behavior and retaining the TREE_OVERFLOW
8415 flag here would confuse later passes such as VRP. */
8416 tree
8417 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8418 tree type, tree op0)
8420 tree res = fold_unary_loc (loc, code, type, op0);
8421 if (res
8422 && TREE_CODE (res) == INTEGER_CST
8423 && TREE_CODE (op0) == INTEGER_CST
8424 && CONVERT_EXPR_CODE_P (code))
8425 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8427 return res;
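/* Example (illustrative, not from the original source): folding
   (int) 4294967295u yields the constant -1 with TREE_OVERFLOW set by
   the conversion; since the operand had no overflow and the wraparound
   is implementation-defined, the flag is cleared here so passes like
   VRP do not treat the result as overflowed.  */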
8430 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8431 operands OP0 and OP1. LOC is the location of the resulting expression.
8432 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8433 Return the folded expression if folding is successful. Otherwise,
8434 return NULL_TREE. */
8435 static tree
8436 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8437 tree arg0, tree arg1, tree op0, tree op1)
8439 tree tem;
8441 /* We only do these simplifications if we are optimizing. */
8442 if (!optimize)
8443 return NULL_TREE;
8445 /* Check for things like (A || B) && (A || C). We can convert this
8446 to A || (B && C). Note that either operator can be any of the four
8447 truth and/or operations and the transformation will still be
8448 valid. Also note that we only care about order for the
8449 ANDIF and ORIF operators. If B contains side effects, this
8450 might change the truth-value of A. */
8451 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8452 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8453 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8454 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8455 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8456 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8458 tree a00 = TREE_OPERAND (arg0, 0);
8459 tree a01 = TREE_OPERAND (arg0, 1);
8460 tree a10 = TREE_OPERAND (arg1, 0);
8461 tree a11 = TREE_OPERAND (arg1, 1);
8462 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8463 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8464 && (code == TRUTH_AND_EXPR
8465 || code == TRUTH_OR_EXPR));
8467 if (operand_equal_p (a00, a10, 0))
8468 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8469 fold_build2_loc (loc, code, type, a01, a11));
8470 else if (commutative && operand_equal_p (a00, a11, 0))
8471 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8472 fold_build2_loc (loc, code, type, a01, a10));
8473 else if (commutative && operand_equal_p (a01, a10, 0))
8474 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8475 fold_build2_loc (loc, code, type, a00, a11));
8477 /* This case is tricky because we must either have commutative
8478 operators or else A10 must not have side effects. */
8480 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8481 && operand_equal_p (a01, a11, 0))
8482 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8483 fold_build2_loc (loc, code, type, a00, a10),
8484 a01);
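/* Worked example (illustrative, not from the original source): with B
   free of side effects, (a || b) && (a || c) becomes a || (b && c) via
   the A00 == A10 case above; B and C keep their relative order, which
   is what the ANDIF/ORIF forms require.  */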
8487 /* See if we can build a range comparison. */
8488 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8489 return tem;
8491 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8492 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8494 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8495 if (tem)
8496 return fold_build2_loc (loc, code, type, tem, arg1);
8499 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8500 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8502 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8503 if (tem)
8504 return fold_build2_loc (loc, code, type, arg0, tem);
8507 /* Check for the possibility of merging component references. If our
8508 lhs is another similar operation, try to merge its rhs with our
8509 rhs. Then try to merge our lhs and rhs. */
8510 if (TREE_CODE (arg0) == code
8511 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8512 TREE_OPERAND (arg0, 1), arg1)))
8513 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8515 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8516 return tem;
8518 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8519 && (code == TRUTH_AND_EXPR
8520 || code == TRUTH_ANDIF_EXPR
8521 || code == TRUTH_OR_EXPR
8522 || code == TRUTH_ORIF_EXPR))
8524 enum tree_code ncode, icode;
8526 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8527 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8528 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8530 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8531 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8532 We don't want to pack more than two leaves into a non-IF AND/OR
8533 expression.
8534 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8535 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8536 If the inner right-hand side of the left-hand operand has
8537 side effects, or isn't simple, then we can't add to it,
8538 as otherwise we might destroy the if-sequence. */
8539 if (TREE_CODE (arg0) == icode
8540 && simple_operand_p_2 (arg1)
8541 /* Needed for sequence points, to handle traps and
8542 side effects. */
8543 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8545 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8546 arg1);
8547 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8548 tem);
8550 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8551 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8552 else if (TREE_CODE (arg1) == icode
8553 && simple_operand_p_2 (arg0)
8554 /* Needed for sequence points, to handle traps and
8555 side effects. */
8556 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8558 tem = fold_build2_loc (loc, ncode, type,
8559 arg0, TREE_OPERAND (arg1, 0));
8560 return fold_build2_loc (loc, icode, type, tem,
8561 TREE_OPERAND (arg1, 1));
8563 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8564 into (A OR B).
8565 For sequence point consistency, we need to check for trapping
8566 and side effects. */
8567 else if (code == icode && simple_operand_p_2 (arg0)
8568 && simple_operand_p_2 (arg1))
8569 return fold_build2_loc (loc, ncode, type, arg0, arg1);
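/* Worked example (illustrative, not from the original source): when
   LOGICAL_OP_NON_SHORT_CIRCUIT holds and b and c are simple and free of
   side effects, ((a && b) && c) is repacked as (a && (b AND c)) with a
   non-short-circuit inner TRUTH_AND_EXPR, so at most two leaves end up
   under any non-IF AND/OR.  */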
8572 return NULL_TREE;
8575 /* Fold a binary expression of code CODE and type TYPE with operands
8576 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8577 Return the folded expression if folding is successful. Otherwise,
8578 return NULL_TREE. */
8580 static tree
8581 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8583 enum tree_code compl_code;
8585 if (code == MIN_EXPR)
8586 compl_code = MAX_EXPR;
8587 else if (code == MAX_EXPR)
8588 compl_code = MIN_EXPR;
8589 else
8590 gcc_unreachable ();
8592 /* MIN (MAX (a, b), b) == b. */
8593 if (TREE_CODE (op0) == compl_code
8594 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8595 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8597 /* MIN (MAX (b, a), b) == b. */
8598 if (TREE_CODE (op0) == compl_code
8599 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8600 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8601 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8603 /* MIN (a, MAX (a, b)) == a. */
8604 if (TREE_CODE (op1) == compl_code
8605 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8606 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8607 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8609 /* MIN (a, MAX (b, a)) == a. */
8610 if (TREE_CODE (op1) == compl_code
8611 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8612 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8613 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8615 return NULL_TREE;
8618 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8619 by changing CODE to reduce the magnitude of constants involved in
8620 ARG0 of the comparison.
8621 Returns a canonicalized comparison tree if a simplification was
8622 possible, otherwise returns NULL_TREE.
8623 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8624 valid if signed overflow is undefined. */
8626 static tree
8627 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8628 tree arg0, tree arg1,
8629 bool *strict_overflow_p)
8631 enum tree_code code0 = TREE_CODE (arg0);
8632 tree t, cst0 = NULL_TREE;
8633 int sgn0;
8634 bool swap = false;
8636 /* Match A +- CST code arg1 and CST code arg1. We can change the
8637 first form only if overflow is undefined. */
8638 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8639 /* In principle pointers also have undefined overflow behavior,
8640 but that causes problems elsewhere. */
8641 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8642 && (code0 == MINUS_EXPR
8643 || code0 == PLUS_EXPR)
8644 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8645 || code0 == INTEGER_CST))
8646 return NULL_TREE;
8648 /* Identify the constant in arg0 and its sign. */
8649 if (code0 == INTEGER_CST)
8650 cst0 = arg0;
8651 else
8652 cst0 = TREE_OPERAND (arg0, 1);
8653 sgn0 = tree_int_cst_sgn (cst0);
8655 /* Overflowed constants and zero will cause problems. */
8656 if (integer_zerop (cst0)
8657 || TREE_OVERFLOW (cst0))
8658 return NULL_TREE;
8660 /* See if we can reduce the magnitude of the constant in
8661 arg0 by changing the comparison code. */
8662 if (code0 == INTEGER_CST)
8664 /* CST <= arg1 -> CST-1 < arg1. */
8665 if (code == LE_EXPR && sgn0 == 1)
8666 code = LT_EXPR;
8667 /* -CST < arg1 -> -CST-1 <= arg1. */
8668 else if (code == LT_EXPR && sgn0 == -1)
8669 code = LE_EXPR;
8670 /* CST > arg1 -> CST-1 >= arg1. */
8671 else if (code == GT_EXPR && sgn0 == 1)
8672 code = GE_EXPR;
8673 /* -CST >= arg1 -> -CST-1 > arg1. */
8674 else if (code == GE_EXPR && sgn0 == -1)
8675 code = GT_EXPR;
8676 else
8677 return NULL_TREE;
8678 /* arg1 code' CST' might be more canonical. */
8679 swap = true;
8681 else
8683 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8684 if (code == LT_EXPR
8685 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8686 code = LE_EXPR;
8687 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8688 else if (code == GT_EXPR
8689 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8690 code = GE_EXPR;
8691 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8692 else if (code == LE_EXPR
8693 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8694 code = LT_EXPR;
8695 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8696 else if (code == GE_EXPR
8697 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8698 code = GT_EXPR;
8699 else
8700 return NULL_TREE;
8701 *strict_overflow_p = true;
8704 /* Now build the constant reduced in magnitude. But not if that
8705 would produce one outside of its type's range. */
8706 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8707 && ((sgn0 == 1
8708 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8709 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8710 || (sgn0 == -1
8711 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8712 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8713 /* We cannot swap the comparison here as that would cause us to
8714 endlessly recurse. */
8715 return NULL_TREE;
8717 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8718 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8719 if (code0 != INTEGER_CST)
8720 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8721 t = fold_convert (TREE_TYPE (arg1), t);
8723 /* If swapping might yield a more canonical form, do so. */
8724 if (swap)
8725 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8726 else
8727 return fold_build2_loc (loc, code, type, t, arg1);
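/* Worked examples (illustrative, not from the original source):
   "3 <= x" becomes "x > 2" (constant reduced, comparison swapped), and
   when signed overflow is undefined "x - 5 < y" becomes "x - 4 <= y";
   in each case the magnitude of the constant shrinks by one.  */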
8730 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8731 overflow further. Try to decrease the magnitude of constants involved
8732 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8733 and put sole constants at the second argument position.
8734 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8736 static tree
8737 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8738 tree arg0, tree arg1)
8740 tree t;
8741 bool strict_overflow_p;
8742 const char * const warnmsg = G_("assuming signed overflow does not occur "
8743 "when reducing constant in comparison");
8745 /* Try canonicalization by simplifying arg0. */
8746 strict_overflow_p = false;
8747 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8748 &strict_overflow_p);
8749 if (t)
8751 if (strict_overflow_p)
8752 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8753 return t;
8756 /* Try canonicalization by simplifying arg1 using the swapped
8757 comparison. */
8758 code = swap_tree_comparison (code);
8759 strict_overflow_p = false;
8760 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8761 &strict_overflow_p);
8762 if (t && strict_overflow_p)
8763 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8764 return t;
8767 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8768 space. This is used to avoid issuing overflow warnings for
8769 expressions like &p->x which cannot wrap. */
8771 static bool
8772 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8774 double_int di_offset, total;
8776 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8777 return true;
8779 if (bitpos < 0)
8780 return true;
8782 if (offset == NULL_TREE)
8783 di_offset = double_int_zero;
8784 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8785 return true;
8786 else
8787 di_offset = TREE_INT_CST (offset);
8789 bool overflow;
8790 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8791 total = di_offset.add_with_sign (units, true, &overflow);
8792 if (overflow)
8793 return true;
8795 if (total.high != 0)
8796 return true;
8798 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8799 if (size <= 0)
8800 return true;
8802 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8803 array. */
8804 if (TREE_CODE (base) == ADDR_EXPR)
8806 HOST_WIDE_INT base_size;
8808 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8809 if (base_size > 0 && size < base_size)
8810 size = base_size;
8813 return total.low > (unsigned HOST_WIDE_INT) size;
8816 /* Subroutine of fold_binary. This routine performs all of the
8817 transformations that are common to the equality/inequality
8818 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8819 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8820 fold_binary should call fold_binary instead of using this routine
8821 directly. Fold a comparison with tree code CODE and type TYPE with
8822 operands OP0 and OP1. Return the folded comparison or NULL_TREE. */
8824 static tree
8825 fold_comparison (location_t loc, enum tree_code code, tree type,
8826 tree op0, tree op1)
8828 tree arg0, arg1, tem;
8830 arg0 = op0;
8831 arg1 = op1;
8833 STRIP_SIGN_NOPS (arg0);
8834 STRIP_SIGN_NOPS (arg1);
8836 tem = fold_relational_const (code, type, arg0, arg1);
8837 if (tem != NULL_TREE)
8838 return tem;
8840 /* If one arg is a real or integer constant, put it last. */
8841 if (tree_swap_operands_p (arg0, arg1, true))
8842 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8844 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8845 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8846 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8847 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8848 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8849 && (TREE_CODE (arg1) == INTEGER_CST
8850 && !TREE_OVERFLOW (arg1)))
8852 tree const1 = TREE_OPERAND (arg0, 1);
8853 tree const2 = arg1;
8854 tree variable = TREE_OPERAND (arg0, 0);
8855 tree lhs;
8856 int lhs_add;
8857 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8859 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8860 TREE_TYPE (arg1), const2, const1);
8862 /* If the constant operation overflowed this can be
8863 simplified as a comparison against INT_MAX/INT_MIN. */
8864 if (TREE_CODE (lhs) == INTEGER_CST
8865 && TREE_OVERFLOW (lhs))
8867 int const1_sgn = tree_int_cst_sgn (const1);
8868 enum tree_code code2 = code;
8870 /* Get the sign of the constant on the lhs if the
8871 operation were VARIABLE + CONST1. */
8872 if (TREE_CODE (arg0) == MINUS_EXPR)
8873 const1_sgn = -const1_sgn;
8875 /* The sign of the constant determines if we overflowed
8876 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8877 Canonicalize to the INT_MIN overflow by swapping the comparison
8878 if necessary. */
8879 if (const1_sgn == -1)
8880 code2 = swap_tree_comparison (code);
8882 /* We now can look at the canonicalized case
8883 VARIABLE + 1 CODE2 INT_MIN
8884 and decide on the result. */
8885 if (code2 == LT_EXPR
8886 || code2 == LE_EXPR
8887 || code2 == EQ_EXPR)
8888 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8889 else if (code2 == NE_EXPR
8890 || code2 == GE_EXPR
8891 || code2 == GT_EXPR)
8892 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8895 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8896 && (TREE_CODE (lhs) != INTEGER_CST
8897 || !TREE_OVERFLOW (lhs)))
8899 if (code != EQ_EXPR && code != NE_EXPR)
8900 fold_overflow_warning ("assuming signed overflow does not occur "
8901 "when changing X +- C1 cmp C2 to "
8902 "X cmp C1 +- C2",
8903 WARN_STRICT_OVERFLOW_COMPARISON);
8904 return fold_build2_loc (loc, code, type, variable, lhs);
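/* Worked example (illustrative, not from the original source): with
   undefined signed overflow, "x + 10 < 20" becomes "x < 10"; and when
   C2 -+ C1 overflows, the whole comparison folds away, e.g.
   "x - 1 > INT_MAX" becomes false for every x.  */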
8908 /* For comparisons of pointers we can decompose them into a compile-time
8909 comparison of the base objects and the offsets into the object.
8910 This requires at least one operand being an ADDR_EXPR or a
8911 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8912 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8913 && (TREE_CODE (arg0) == ADDR_EXPR
8914 || TREE_CODE (arg1) == ADDR_EXPR
8915 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8916 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8918 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8919 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8920 enum machine_mode mode;
8921 int volatilep, unsignedp;
8922 bool indirect_base0 = false, indirect_base1 = false;
8924 /* Get base and offset for the access. Strip ADDR_EXPR for
8925 get_inner_reference, but put it back by stripping INDIRECT_REF
8926 off the base object if possible. indirect_baseN will be true
8927 if baseN is not an address but refers to the object itself. */
8928 base0 = arg0;
8929 if (TREE_CODE (arg0) == ADDR_EXPR)
8931 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8932 &bitsize, &bitpos0, &offset0, &mode,
8933 &unsignedp, &volatilep, false);
8934 if (TREE_CODE (base0) == INDIRECT_REF)
8935 base0 = TREE_OPERAND (base0, 0);
8936 else
8937 indirect_base0 = true;
8939 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8941 base0 = TREE_OPERAND (arg0, 0);
8942 STRIP_SIGN_NOPS (base0);
8943 if (TREE_CODE (base0) == ADDR_EXPR)
8945 base0 = TREE_OPERAND (base0, 0);
8946 indirect_base0 = true;
8948 offset0 = TREE_OPERAND (arg0, 1);
8949 if (host_integerp (offset0, 0))
8951 HOST_WIDE_INT off = size_low_cst (offset0);
8952 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8953 * BITS_PER_UNIT)
8954 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8956 bitpos0 = off * BITS_PER_UNIT;
8957 offset0 = NULL_TREE;
8962 base1 = arg1;
8963 if (TREE_CODE (arg1) == ADDR_EXPR)
8965 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8966 &bitsize, &bitpos1, &offset1, &mode,
8967 &unsignedp, &volatilep, false);
8968 if (TREE_CODE (base1) == INDIRECT_REF)
8969 base1 = TREE_OPERAND (base1, 0);
8970 else
8971 indirect_base1 = true;
8973 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8975 base1 = TREE_OPERAND (arg1, 0);
8976 STRIP_SIGN_NOPS (base1);
8977 if (TREE_CODE (base1) == ADDR_EXPR)
8979 base1 = TREE_OPERAND (base1, 0);
8980 indirect_base1 = true;
8982 offset1 = TREE_OPERAND (arg1, 1);
8983 if (host_integerp (offset1, 0))
8985 HOST_WIDE_INT off = size_low_cst (offset1);
8986 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8987 * BITS_PER_UNIT)
8988 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8990 bitpos1 = off * BITS_PER_UNIT;
8991 offset1 = NULL_TREE;
8996 /* A local variable can never be pointed to by
8997 the default SSA name of an incoming parameter. */
8998 if ((TREE_CODE (arg0) == ADDR_EXPR
8999 && indirect_base0
9000 && TREE_CODE (base0) == VAR_DECL
9001 && auto_var_in_fn_p (base0, current_function_decl)
9002 && !indirect_base1
9003 && TREE_CODE (base1) == SSA_NAME
9004 && SSA_NAME_IS_DEFAULT_DEF (base1)
9005 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9006 || (TREE_CODE (arg1) == ADDR_EXPR
9007 && indirect_base1
9008 && TREE_CODE (base1) == VAR_DECL
9009 && auto_var_in_fn_p (base1, current_function_decl)
9010 && !indirect_base0
9011 && TREE_CODE (base0) == SSA_NAME
9012 && SSA_NAME_IS_DEFAULT_DEF (base0)
9013 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9015 if (code == NE_EXPR)
9016 return constant_boolean_node (1, type);
9017 else if (code == EQ_EXPR)
9018 return constant_boolean_node (0, type);
9020 /* If we have equivalent bases we might be able to simplify. */
9021 else if (indirect_base0 == indirect_base1
9022 && operand_equal_p (base0, base1, 0))
9024 /* We can fold this expression to a constant if the non-constant
9025 offset parts are equal. */
9026 if ((offset0 == offset1
9027 || (offset0 && offset1
9028 && operand_equal_p (offset0, offset1, 0)))
9029 && (code == EQ_EXPR
9030 || code == NE_EXPR
9031 || (indirect_base0 && DECL_P (base0))
9032 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9035 if (code != EQ_EXPR
9036 && code != NE_EXPR
9037 && bitpos0 != bitpos1
9038 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9039 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9040 fold_overflow_warning (("assuming pointer wraparound does not "
9041 "occur when comparing P +- C1 with "
9042 "P +- C2"),
9043 WARN_STRICT_OVERFLOW_CONDITIONAL);
9045 switch (code)
9047 case EQ_EXPR:
9048 return constant_boolean_node (bitpos0 == bitpos1, type);
9049 case NE_EXPR:
9050 return constant_boolean_node (bitpos0 != bitpos1, type);
9051 case LT_EXPR:
9052 return constant_boolean_node (bitpos0 < bitpos1, type);
9053 case LE_EXPR:
9054 return constant_boolean_node (bitpos0 <= bitpos1, type);
9055 case GE_EXPR:
9056 return constant_boolean_node (bitpos0 >= bitpos1, type);
9057 case GT_EXPR:
9058 return constant_boolean_node (bitpos0 > bitpos1, type);
9059 default:;
9062 /* We can simplify the comparison to a comparison of the variable
9063 offset parts if the constant offset parts are equal.
9064 Be careful to use signed sizetype here because otherwise we
9065 mess with array offsets in the wrong way. This is possible
9066 because pointer arithmetic is restricted to remain within an
9067 object and overflow on pointer differences is undefined by
9068 C99 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9069 else if (bitpos0 == bitpos1
9070 && ((code == EQ_EXPR || code == NE_EXPR)
9071 || (indirect_base0 && DECL_P (base0))
9072 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9074 /* By converting to signed sizetype we cover middle-end pointer
9075 arithmetic which operates on unsigned pointer types of size
9076 type size and ARRAY_REF offsets which are properly sign or
9077 zero extended from their type in case it is narrower than
9078 sizetype. */
9079 if (offset0 == NULL_TREE)
9080 offset0 = build_int_cst (ssizetype, 0);
9081 else
9082 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9083 if (offset1 == NULL_TREE)
9084 offset1 = build_int_cst (ssizetype, 0);
9085 else
9086 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9088 if (code != EQ_EXPR
9089 && code != NE_EXPR
9090 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9091 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9092 fold_overflow_warning (("assuming pointer wraparound does not "
9093 "occur when comparing P +- C1 with "
9094 "P +- C2"),
9095 WARN_STRICT_OVERFLOW_COMPARISON);
9097 return fold_build2_loc (loc, code, type, offset0, offset1);
9100 /* For non-equal bases we can simplify if they are addresses
9101 of local binding decls or constants. */
9102 else if (indirect_base0 && indirect_base1
9103 /* We know that !operand_equal_p (base0, base1, 0)
9104 because the if condition was false. But make
9105 sure two decls are not the same. */
9106 && base0 != base1
9107 && TREE_CODE (arg0) == ADDR_EXPR
9108 && TREE_CODE (arg1) == ADDR_EXPR
9109 && (((TREE_CODE (base0) == VAR_DECL
9110 || TREE_CODE (base0) == PARM_DECL)
9111 && (targetm.binds_local_p (base0)
9112 || CONSTANT_CLASS_P (base1)))
9113 || CONSTANT_CLASS_P (base0))
9114 && (((TREE_CODE (base1) == VAR_DECL
9115 || TREE_CODE (base1) == PARM_DECL)
9116 && (targetm.binds_local_p (base1)
9117 || CONSTANT_CLASS_P (base0)))
9118 || CONSTANT_CLASS_P (base1)))
9120 if (code == EQ_EXPR)
9121 return omit_two_operands_loc (loc, type, boolean_false_node,
9122 arg0, arg1);
9123 else if (code == NE_EXPR)
9124 return omit_two_operands_loc (loc, type, boolean_true_node,
9125 arg0, arg1);
9127 /* For equal offsets we can simplify to a comparison of the
9128 base addresses. */
9129 else if (bitpos0 == bitpos1
9130 && (indirect_base0
9131 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9132 && (indirect_base1
9133 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9134 && ((offset0 == offset1)
9135 || (offset0 && offset1
9136 && operand_equal_p (offset0, offset1, 0))))
9138 if (indirect_base0)
9139 base0 = build_fold_addr_expr_loc (loc, base0);
9140 if (indirect_base1)
9141 base1 = build_fold_addr_expr_loc (loc, base1);
9142 return fold_build2_loc (loc, code, type, base0, base1);
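      /* For illustration (hypothetical local array, 32-bit int):
	   int a[8];
	   &a[1] < &a[3]
	 decomposes to the common base A with bit positions 1*32 and
	 3*32, so it folds to the constant 1, while &a[i] == &a[j]
	 has equal bases and bit positions and folds to a comparison
	 of the converted variable offsets of i and j.  */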
9146 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9147 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9148 the resulting offset is smaller in absolute value than the
9149 original one. */
9150 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9151 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9152 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9153 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9154 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9155 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9156 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9158 tree const1 = TREE_OPERAND (arg0, 1);
9159 tree const2 = TREE_OPERAND (arg1, 1);
9160 tree variable1 = TREE_OPERAND (arg0, 0);
9161 tree variable2 = TREE_OPERAND (arg1, 0);
9162 tree cst;
9163 const char * const warnmsg = G_("assuming signed overflow does not "
9164 "occur when combining constants around "
9165 "a comparison");
9167 /* Put the constant on the side where it doesn't overflow and is
9168 of lower absolute value than before. */
9169 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9170 ? MINUS_EXPR : PLUS_EXPR,
9171 const2, const1);
9172 if (!TREE_OVERFLOW (cst)
9173 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9175 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9176 return fold_build2_loc (loc, code, type,
9177 variable1,
9178 fold_build2_loc (loc,
9179 TREE_CODE (arg1), TREE_TYPE (arg1),
9180 variable2, cst));
9183 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9184 ? MINUS_EXPR : PLUS_EXPR,
9185 const1, const2);
9186 if (!TREE_OVERFLOW (cst)
9187 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9189 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9190 return fold_build2_loc (loc, code, type,
9191 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9192 variable1, cst),
9193 variable2);
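      /* E.g. (illustrative, signed operands with undefined overflow):
	   x + 10 < y + 12   becomes   x < y + 2
	 because the combined constant 2 is smaller in absolute value
	 than the original 12, so no new overflow is introduced.  */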
9197 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9198 signed arithmetic case. That form is created by the compiler
9199 often enough that folding it is worthwhile. One example is in
9200 computing loop trip counts after Operator Strength Reduction. */
9201 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9202 && TREE_CODE (arg0) == MULT_EXPR
9203 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9204 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9205 && integer_zerop (arg1))
9207 tree const1 = TREE_OPERAND (arg0, 1);
9208 tree const2 = arg1; /* zero */
9209 tree variable1 = TREE_OPERAND (arg0, 0);
9210 enum tree_code cmp_code = code;
9212 /* Handle unfolded multiplication by zero. */
9213 if (integer_zerop (const1))
9214 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9216 fold_overflow_warning (("assuming signed overflow does not occur when "
9217 "eliminating multiplication in comparison "
9218 "with zero"),
9219 WARN_STRICT_OVERFLOW_COMPARISON);
9221 /* If const1 is negative we swap the sense of the comparison. */
9222 if (tree_int_cst_sgn (const1) < 0)
9223 cmp_code = swap_tree_comparison (cmp_code);
9225 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
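      /* E.g. for signed x with undefined overflow (values illustrative):
	   x * 4 > 0    becomes   x > 0
	   x * -4 > 0   becomes   x < 0
	 since a negative factor swaps the sense of the comparison.  */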
9228 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9229 if (tem)
9230 return tem;
9232 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9234 tree targ0 = strip_float_extensions (arg0);
9235 tree targ1 = strip_float_extensions (arg1);
9236 tree newtype = TREE_TYPE (targ0);
9238 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9239 newtype = TREE_TYPE (targ1);
9241 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9242 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9243 return fold_build2_loc (loc, code, type,
9244 fold_convert_loc (loc, newtype, targ0),
9245 fold_convert_loc (loc, newtype, targ1));
9247 /* (-a) CMP (-b) -> b CMP a */
9248 if (TREE_CODE (arg0) == NEGATE_EXPR
9249 && TREE_CODE (arg1) == NEGATE_EXPR)
9250 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9251 TREE_OPERAND (arg0, 0));
9253 if (TREE_CODE (arg1) == REAL_CST)
9255 REAL_VALUE_TYPE cst;
9256 cst = TREE_REAL_CST (arg1);
9258 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9259 if (TREE_CODE (arg0) == NEGATE_EXPR)
9260 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9261 TREE_OPERAND (arg0, 0),
9262 build_real (TREE_TYPE (arg1),
9263 real_value_negate (&cst)));
9265 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9266 /* a CMP (-0) -> a CMP 0 */
9267 if (REAL_VALUE_MINUS_ZERO (cst))
9268 return fold_build2_loc (loc, code, type, arg0,
9269 build_real (TREE_TYPE (arg1), dconst0));
9271 /* x != NaN is always true, other ops are always false. */
9272 if (REAL_VALUE_ISNAN (cst)
9273 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9275 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9276 return omit_one_operand_loc (loc, type, tem, arg0);
9279 /* Fold comparisons against infinity. */
9280 if (REAL_VALUE_ISINF (cst)
9281 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9283 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9284 if (tem != NULL_TREE)
9285 return tem;
9289 /* If this is a comparison of a real constant with a PLUS_EXPR
9290 or a MINUS_EXPR of a real constant, we can convert it into a
9291 comparison with a revised real constant, provided that
9292 unsafe_math_optimizations are enabled and no overflow occurs. */
9293 if (flag_unsafe_math_optimizations
9294 && TREE_CODE (arg1) == REAL_CST
9295 && (TREE_CODE (arg0) == PLUS_EXPR
9296 || TREE_CODE (arg0) == MINUS_EXPR)
9297 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9298 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9299 ? MINUS_EXPR : PLUS_EXPR,
9300 arg1, TREE_OPERAND (arg0, 1)))
9301 && !TREE_OVERFLOW (tem))
9302 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9304 /* Likewise, we can simplify a comparison of a real constant with
9305 a MINUS_EXPR whose first operand is also a real constant, i.e.
9306 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9307 floating-point types only if -fassociative-math is set. */
9308 if (flag_associative_math
9309 && TREE_CODE (arg1) == REAL_CST
9310 && TREE_CODE (arg0) == MINUS_EXPR
9311 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9312 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9313 arg1))
9314 && !TREE_OVERFLOW (tem))
9315 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9316 TREE_OPERAND (arg0, 1), tem);
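	  /* E.g. (illustrative constants):
	       x + 1.5 < 3.0    becomes   x < 1.5
	     under -funsafe-math-optimizations, and
	       (5.0 - x) < 2.0  becomes   x > 3.0
	     under -fassociative-math, provided the revised constant
	     does not overflow.  */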
9318 /* Fold comparisons against built-in math functions. */
9319 if (TREE_CODE (arg1) == REAL_CST
9320 && flag_unsafe_math_optimizations
9321 && ! flag_errno_math)
9323 enum built_in_function fcode = builtin_mathfn_code (arg0);
9325 if (fcode != END_BUILTINS)
9327 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9328 if (tem != NULL_TREE)
9329 return tem;
9334 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9335 && CONVERT_EXPR_P (arg0))
9337 /* If we are widening one operand of an integer comparison,
9338 see if the other operand is similarly being widened. Perhaps we
9339 can do the comparison in the narrower type. */
9340 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9341 if (tem)
9342 return tem;
9344 /* Or if we are changing signedness. */
9345 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9346 if (tem)
9347 return tem;
9350 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9351 constant, we can simplify it. */
9352 if (TREE_CODE (arg1) == INTEGER_CST
9353 && (TREE_CODE (arg0) == MIN_EXPR
9354 || TREE_CODE (arg0) == MAX_EXPR)
9355 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9357 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9358 if (tem)
9359 return tem;
9362 /* Simplify comparison of something with itself. (For IEEE
9363 floating-point, we can only do some of these simplifications.) */
9364 if (operand_equal_p (arg0, arg1, 0))
9366 switch (code)
9368 case EQ_EXPR:
9369 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9370 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9371 return constant_boolean_node (1, type);
9372 break;
9374 case GE_EXPR:
9375 case LE_EXPR:
9376 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9377 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9378 return constant_boolean_node (1, type);
9379 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9381 case NE_EXPR:
9382 /* For NE, we can only do this simplification if the operands are
9383 integral or we don't honor IEEE floating-point NaNs. */
9384 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9385 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9386 break;
9387 /* ... fall through ... */
9388 case GT_EXPR:
9389 case LT_EXPR:
9390 return constant_boolean_node (0, type);
9391 default:
9392 gcc_unreachable ();
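      /* E.g. for integral x, the operands being equal means x == x,
	 x <= x and x >= x fold to 1 and x < x folds to 0; for a float
	 x this holds only when NaNs need not be honored, since
	 NaN == NaN is false under IEEE semantics.  */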
9396 /* If we are comparing an expression that just has comparisons
9397 of two integer values, arithmetic expressions of those comparisons,
9398 and constants, we can simplify it. There are only three cases
9399 to check: the two values can either be equal, the first can be
9400 greater, or the second can be greater. Fold the expression for
9401 those three values. Since each value must be 0 or 1, we have
9402 eight possibilities, each of which corresponds to the constant 0
9403 or 1 or one of the six possible comparisons.
9405 This handles common cases like (a > b) == 0 but also handles
9406 expressions like ((x > y) - (y > x)) > 0, which supposedly
9407 occur in macroized code. */
9409 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9411 tree cval1 = 0, cval2 = 0;
9412 int save_p = 0;
9414 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9415 /* Don't handle degenerate cases here; they should already
9416 have been handled anyway. */
9417 && cval1 != 0 && cval2 != 0
9418 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9419 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9420 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9421 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9422 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9423 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9424 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9426 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9427 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9429 /* We can't just pass T to eval_subst in case cval1 or cval2
9430 was the same as ARG1. */
9432 tree high_result
9433 = fold_build2_loc (loc, code, type,
9434 eval_subst (loc, arg0, cval1, maxval,
9435 cval2, minval),
9436 arg1);
9437 tree equal_result
9438 = fold_build2_loc (loc, code, type,
9439 eval_subst (loc, arg0, cval1, maxval,
9440 cval2, maxval),
9441 arg1);
9442 tree low_result
9443 = fold_build2_loc (loc, code, type,
9444 eval_subst (loc, arg0, cval1, minval,
9445 cval2, maxval),
9446 arg1);
9448 /* All three of these results should be 0 or 1. Confirm they are.
9449 Then use those values to select the proper code to use. */
9451 if (TREE_CODE (high_result) == INTEGER_CST
9452 && TREE_CODE (equal_result) == INTEGER_CST
9453 && TREE_CODE (low_result) == INTEGER_CST)
9455 /* Make a 3-bit mask with the high-order bit being the
9456 value for `>', the next for '=', and the low for '<'. */
9457 switch ((integer_onep (high_result) * 4)
9458 + (integer_onep (equal_result) * 2)
9459 + integer_onep (low_result))
9461 case 0:
9462 /* Always false. */
9463 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9464 case 1:
9465 code = LT_EXPR;
9466 break;
9467 case 2:
9468 code = EQ_EXPR;
9469 break;
9470 case 3:
9471 code = LE_EXPR;
9472 break;
9473 case 4:
9474 code = GT_EXPR;
9475 break;
9476 case 5:
9477 code = NE_EXPR;
9478 break;
9479 case 6:
9480 code = GE_EXPR;
9481 break;
9482 case 7:
9483 /* Always true. */
9484 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9487 if (save_p)
9489 tem = save_expr (build2 (code, type, cval1, cval2));
9490 SET_EXPR_LOCATION (tem, loc);
9491 return tem;
9493 return fold_build2_loc (loc, code, type, cval1, cval2);
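      /* Worked example of the 3-bit mask: for (a > b) == 0 the three
	 substitutions give
	   a = max, b = min  ->  high_result  = 0
	   a = max, b = max  ->  equal_result = 1
	   a = min, b = max  ->  low_result   = 1
	 so the mask is 0*4 + 1*2 + 1 == 3, which selects LE_EXPR and
	 the expression folds to a <= b.  */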
9498 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9499 into a single range test. */
9500 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9501 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9502 && TREE_CODE (arg1) == INTEGER_CST
9503 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9504 && !integer_zerop (TREE_OPERAND (arg0, 1))
9505 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9506 && !TREE_OVERFLOW (arg1))
9508 tem = fold_div_compare (loc, code, type, arg0, arg1);
9509 if (tem != NULL_TREE)
9510 return tem;
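      /* E.g. for unsigned x (bounds illustrative):
	   x / 4 == 2
	 holds exactly for x in [8, 11], so fold_div_compare can turn
	 it into a single range test of the form x - 8 <= 3 computed
	 in an unsigned type.  */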
9513 /* Fold ~X op ~Y as Y op X. */
9514 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9515 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9517 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9518 return fold_build2_loc (loc, code, type,
9519 fold_convert_loc (loc, cmp_type,
9520 TREE_OPERAND (arg1, 0)),
9521 TREE_OPERAND (arg0, 0));
9524 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9525 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9526 && TREE_CODE (arg1) == INTEGER_CST)
9528 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9529 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9530 TREE_OPERAND (arg0, 0),
9531 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9532 fold_convert_loc (loc, cmp_type, arg1)));
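      /* E.g.:
	   ~x < ~y    becomes   y < x
	   ~x == 5    becomes   x == ~5   (x == -6 for signed x)
	 since bitwise NOT reverses the order of its operand.  */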
9535 return NULL_TREE;
9539 /* Subroutine of fold_binary. Optimize complex multiplications of the
9540 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9541 argument EXPR represents the expression "z" of type TYPE. */
9543 static tree
9544 fold_mult_zconjz (location_t loc, tree type, tree expr)
9546 tree itype = TREE_TYPE (type);
9547 tree rpart, ipart, tem;
9549 if (TREE_CODE (expr) == COMPLEX_EXPR)
9551 rpart = TREE_OPERAND (expr, 0);
9552 ipart = TREE_OPERAND (expr, 1);
9554 else if (TREE_CODE (expr) == COMPLEX_CST)
9556 rpart = TREE_REALPART (expr);
9557 ipart = TREE_IMAGPART (expr);
9559 else
9561 expr = save_expr (expr);
9562 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9563 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9566 rpart = save_expr (rpart);
9567 ipart = save_expr (ipart);
9568 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9569 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9570 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9571 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9572 build_zero_cst (itype));
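   /* The identity used above: for z = a + b*i,
	z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i,
      so the imaginary part of the result is exactly zero.  */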
9576 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9577 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9578 guarantees that P and N have the same least significant log2(M) bits.
9579 N is not otherwise constrained. In particular, N is not normalized to
9580 0 <= N < M as is common. In general, the precise value of P is unknown.
9581 M is chosen as large as possible such that constant N can be determined.
9583 Returns M and sets *RESIDUE to N.
9585 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9586 account. This is not always possible due to PR 35705.
9589 static unsigned HOST_WIDE_INT
9590 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9591 bool allow_func_align)
9593 enum tree_code code;
9595 *residue = 0;
9597 code = TREE_CODE (expr);
9598 if (code == ADDR_EXPR)
9600 unsigned int bitalign;
9601 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9602 *residue /= BITS_PER_UNIT;
9603 return bitalign / BITS_PER_UNIT;
9605 else if (code == POINTER_PLUS_EXPR)
9607 tree op0, op1;
9608 unsigned HOST_WIDE_INT modulus;
9609 enum tree_code inner_code;
9611 op0 = TREE_OPERAND (expr, 0);
9612 STRIP_NOPS (op0);
9613 modulus = get_pointer_modulus_and_residue (op0, residue,
9614 allow_func_align);
9616 op1 = TREE_OPERAND (expr, 1);
9617 STRIP_NOPS (op1);
9618 inner_code = TREE_CODE (op1);
9619 if (inner_code == INTEGER_CST)
9621 *residue += TREE_INT_CST_LOW (op1);
9622 return modulus;
9624 else if (inner_code == MULT_EXPR)
9626 op1 = TREE_OPERAND (op1, 1);
9627 if (TREE_CODE (op1) == INTEGER_CST)
9629 unsigned HOST_WIDE_INT align;
9631 /* Compute the greatest power-of-2 divisor of op1. */
9632 align = TREE_INT_CST_LOW (op1);
9633 align &= -align;
9635 /* If align is non-zero and less than modulus, replace
9636 modulus with align. If align is 0, then either op1 is 0
9637 or the greatest power-of-2 divisor of op1 doesn't fit in an
9638 unsigned HOST_WIDE_INT. In either case, no additional
9639 constraint is imposed. */
9640 if (align)
9641 modulus = MIN (modulus, align);
9643 return modulus;
9648 /* If we get here, we were unable to determine anything useful about the
9649 expression. */
9650 return 1;
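   /* E.g. (illustrative alignments): if buf is known to be 16-byte
      aligned, then for the expression &buf + 4 this returns
	modulus M = 16, residue N = 4,
      i.e. the low log2(16) == 4 bits of the pointer value are known.
      The fallback return value 1 carries no information, since every
      P satisfies P == N (mod 1).  */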
9653 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9654 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9656 static bool
9657 vec_cst_ctor_to_array (tree arg, tree *elts)
9659 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9661 if (TREE_CODE (arg) == VECTOR_CST)
9663 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9664 elts[i] = VECTOR_CST_ELT (arg, i);
9666 else if (TREE_CODE (arg) == CONSTRUCTOR)
9668 constructor_elt *elt;
9670 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9671 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9672 return false;
9673 else
9674 elts[i] = elt->value;
9676 else
9677 return false;
9678 for (; i < nelts; i++)
9679 elts[i]
9680 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9681 return true;
9684 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9685 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9686 NULL_TREE otherwise. */
9688 static tree
9689 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9691 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9692 tree *elts;
9693 bool need_ctor = false;
9695 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9696 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9697 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9698 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9699 return NULL_TREE;
9701 elts = XALLOCAVEC (tree, nelts * 3);
9702 if (!vec_cst_ctor_to_array (arg0, elts)
9703 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9704 return NULL_TREE;
9706 for (i = 0; i < nelts; i++)
9708 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9709 need_ctor = true;
9710 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9713 if (need_ctor)
9715 vec<constructor_elt, va_gc> *v;
9716 vec_alloc (v, nelts);
9717 for (i = 0; i < nelts; i++)
9718 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9719 return build_constructor (type, v);
9721 else
9722 return build_vector (type, &elts[2 * nelts]);
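   /* E.g. for two-element vectors (illustrative constants):
	arg0 = { 1, 2 },  arg1 = { 3, 4 },  sel = { 0, 3 }
      indexes the concatenation { 1, 2, 3, 4 }, yielding the folded
      vector { 1, 4 }.  */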
9725 /* Try to fold a pointer difference of type TYPE between two address
9726 expressions of array references AREF0 and AREF1 using location LOC. Return a
9727 simplified expression for the difference or NULL_TREE. */
9729 static tree
9730 fold_addr_of_array_ref_difference (location_t loc, tree type,
9731 tree aref0, tree aref1)
9733 tree base0 = TREE_OPERAND (aref0, 0);
9734 tree base1 = TREE_OPERAND (aref1, 0);
9735 tree base_offset = build_int_cst (type, 0);
9737 /* If the bases are array references as well, recurse. If the bases
9738 are pointer indirections, compute the difference of the pointers.
9739 If the bases are equal, we are set. */
9740 if ((TREE_CODE (base0) == ARRAY_REF
9741 && TREE_CODE (base1) == ARRAY_REF
9742 && (base_offset
9743 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9744 || (INDIRECT_REF_P (base0)
9745 && INDIRECT_REF_P (base1)
9746 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9747 TREE_OPERAND (base0, 0),
9748 TREE_OPERAND (base1, 0))))
9749 || operand_equal_p (base0, base1, 0))
9751 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9752 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9753 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9754 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9755 return fold_build2_loc (loc, PLUS_EXPR, type,
9756 base_offset,
9757 fold_build2_loc (loc, MULT_EXPR, type,
9758 diff, esz));
9760 return NULL_TREE;
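   /* E.g. for int a[16] (hypothetical, sizeof (int) == 4):
	&a[i] - &a[j]   folds to   0 + (i - j) * 4,
      and for a two-dimensional array, nested references such as
      &a[i][2] - &a[j][5] recurse on the outer ARRAY_REFs first to
      accumulate the base offset.  */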
9763 /* If the real or vector real constant CST of type TYPE has an exact
9764 inverse, return it, else return NULL. */
9766 static tree
9767 exact_inverse (tree type, tree cst)
9769 REAL_VALUE_TYPE r;
9770 tree unit_type, *elts;
9771 enum machine_mode mode;
9772 unsigned vec_nelts, i;
9774 switch (TREE_CODE (cst))
9776 case REAL_CST:
9777 r = TREE_REAL_CST (cst);
9779 if (exact_real_inverse (TYPE_MODE (type), &r))
9780 return build_real (type, r);
9782 return NULL_TREE;
9784 case VECTOR_CST:
9785 vec_nelts = VECTOR_CST_NELTS (cst);
9786 elts = XALLOCAVEC (tree, vec_nelts);
9787 unit_type = TREE_TYPE (type);
9788 mode = TYPE_MODE (unit_type);
9790 for (i = 0; i < vec_nelts; i++)
9792 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9793 if (!exact_real_inverse (mode, &r))
9794 return NULL_TREE;
9795 elts[i] = build_real (unit_type, r);
9798 return build_vector (type, elts);
9800 default:
9801 return NULL_TREE;
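   /* E.g. 1.0/0.25 == 4.0 exactly, so 0.25 has an exact inverse and
      a division by it may later be rewritten as x * 4.0; 0.1 has no
      exact reciprocal in binary floating point, so NULL_TREE is
      returned and such a division is left alone.  */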
9805 /* Fold a binary expression of code CODE and type TYPE with operands
9806 OP0 and OP1. LOC is the location of the resulting expression.
9807 Return the folded expression if folding is successful. Otherwise,
9808 return NULL_TREE. */
9810 tree
9811 fold_binary_loc (location_t loc,
9812 enum tree_code code, tree type, tree op0, tree op1)
9814 enum tree_code_class kind = TREE_CODE_CLASS (code);
9815 tree arg0, arg1, tem;
9816 tree t1 = NULL_TREE;
9817 bool strict_overflow_p;
9819 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9820 && TREE_CODE_LENGTH (code) == 2
9821 && op0 != NULL_TREE
9822 && op1 != NULL_TREE);
9824 arg0 = op0;
9825 arg1 = op1;
9827 /* Strip any conversions that don't change the mode. This is
9828 safe for every expression, except for a comparison expression
9829 because its signedness is derived from its operands. So, in
9830 the latter case, only strip conversions that don't change the
9831 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9832 preserved.
9834 Note that this is done as an internal manipulation within the
9835 constant folder, in order to find the simplest representation
9836 of the arguments so that their form can be studied. In any
9837 case, the appropriate type conversions should be put back in
9838 the tree that will get out of the constant folder. */
9840 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9842 STRIP_SIGN_NOPS (arg0);
9843 STRIP_SIGN_NOPS (arg1);
9845 else
9847 STRIP_NOPS (arg0);
9848 STRIP_NOPS (arg1);
9851 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9852 constant but we can't do arithmetic on them. */
9853 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9854 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9855 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9856 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9857 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9858 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9860 if (kind == tcc_binary)
9862 /* Make sure type and arg0 have the same saturating flag. */
9863 gcc_assert (TYPE_SATURATING (type)
9864 == TYPE_SATURATING (TREE_TYPE (arg0)));
9865 tem = const_binop (code, arg0, arg1);
9867 else if (kind == tcc_comparison)
9868 tem = fold_relational_const (code, type, arg0, arg1);
9869 else
9870 tem = NULL_TREE;
9872 if (tem != NULL_TREE)
9874 if (TREE_TYPE (tem) != type)
9875 tem = fold_convert_loc (loc, type, tem);
9876 return tem;
9880 /* If this is a commutative operation, and ARG0 is a constant, move it
9881 to ARG1 to reduce the number of tests below. */
9882 if (commutative_tree_code (code)
9883 && tree_swap_operands_p (arg0, arg1, true))
9884 return fold_build2_loc (loc, code, type, op1, op0);
9886 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9888 First check for cases where an arithmetic operation is applied to a
9889 compound, conditional, or comparison operation. Push the arithmetic
9890 operation inside the compound or conditional to see if any folding
9891 can then be done. Convert comparison to conditional for this purpose.
9892 This also optimizes non-constant cases that used to be done in
9893 expand_expr.
9895 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9896 one of the operands is a comparison and the other is a comparison, a
9897 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9898 code below would make the expression more complex. Change it to a
9899 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9900 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9902 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9903 || code == EQ_EXPR || code == NE_EXPR)
9904 && TREE_CODE (type) != VECTOR_TYPE
9905 && ((truth_value_p (TREE_CODE (arg0))
9906 && (truth_value_p (TREE_CODE (arg1))
9907 || (TREE_CODE (arg1) == BIT_AND_EXPR
9908 && integer_onep (TREE_OPERAND (arg1, 1)))))
9909 || (truth_value_p (TREE_CODE (arg1))
9910 && (truth_value_p (TREE_CODE (arg0))
9911 || (TREE_CODE (arg0) == BIT_AND_EXPR
9912 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9914 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9915 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9916 : TRUTH_XOR_EXPR,
9917 boolean_type_node,
9918 fold_convert_loc (loc, boolean_type_node, arg0),
9919 fold_convert_loc (loc, boolean_type_node, arg1));
9921 if (code == EQ_EXPR)
9922 tem = invert_truthvalue_loc (loc, tem);
9924 return fold_convert_loc (loc, type, tem);
9927 if (TREE_CODE_CLASS (code) == tcc_binary
9928 || TREE_CODE_CLASS (code) == tcc_comparison)
9930 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9932 tem = fold_build2_loc (loc, code, type,
9933 fold_convert_loc (loc, TREE_TYPE (op0),
9934 TREE_OPERAND (arg0, 1)), op1);
9935 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9936 tem);
9938 if (TREE_CODE (arg1) == COMPOUND_EXPR
9939 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9941 tem = fold_build2_loc (loc, code, type, op0,
9942 fold_convert_loc (loc, TREE_TYPE (op1),
9943 TREE_OPERAND (arg1, 1)));
9944 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9945 tem);
9948 if (TREE_CODE (arg0) == COND_EXPR
9949 || TREE_CODE (arg0) == VEC_COND_EXPR
9950 || COMPARISON_CLASS_P (arg0))
9952 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9953 arg0, arg1,
9954 /*cond_first_p=*/1);
9955 if (tem != NULL_TREE)
9956 return tem;
9959 if (TREE_CODE (arg1) == COND_EXPR
9960 || TREE_CODE (arg1) == VEC_COND_EXPR
9961 || COMPARISON_CLASS_P (arg1))
9963 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9964 arg1, arg0,
9965 /*cond_first_p=*/0);
9966 if (tem != NULL_TREE)
9967 return tem;
9971 switch (code)
9973 case MEM_REF:
9974 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9975 if (TREE_CODE (arg0) == ADDR_EXPR
9976 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9978 tree iref = TREE_OPERAND (arg0, 0);
9979 return fold_build2 (MEM_REF, type,
9980 TREE_OPERAND (iref, 0),
9981 int_const_binop (PLUS_EXPR, arg1,
9982 TREE_OPERAND (iref, 1)));
9985 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9986 if (TREE_CODE (arg0) == ADDR_EXPR
9987 && handled_component_p (TREE_OPERAND (arg0, 0)))
9989 tree base;
9990 HOST_WIDE_INT coffset;
9991 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9992 &coffset);
9993 if (!base)
9994 return NULL_TREE;
9995 return fold_build2 (MEM_REF, type,
9996 build_fold_addr_expr (base),
9997 int_const_binop (PLUS_EXPR, arg1,
9998 size_int (coffset)));
10001 return NULL_TREE;
10003 case POINTER_PLUS_EXPR:
10004 /* 0 +p index -> (type)index */
10005 if (integer_zerop (arg0))
10006 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10008 /* PTR +p 0 -> PTR */
10009 if (integer_zerop (arg1))
10010 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10012 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10013 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10014 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10015 return fold_convert_loc (loc, type,
10016 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10017 fold_convert_loc (loc, sizetype,
10018 arg1),
10019 fold_convert_loc (loc, sizetype,
10020 arg0)));
10022 /* (PTR +p B) +p A -> PTR +p (B + A) */
10023 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10025 tree inner;
10026 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10027 tree arg00 = TREE_OPERAND (arg0, 0);
10028 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10029 arg01, fold_convert_loc (loc, sizetype, arg1));
10030 return fold_convert_loc (loc, type,
10031 fold_build_pointer_plus_loc (loc,
10032 arg00, inner));
10035 /* PTR_CST +p CST -> CST1 */
10036 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10037 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10038 fold_convert_loc (loc, type, arg1));
10040 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10041 of the array. The loop optimizer sometimes produces this type of
10042 expression. */
10043 if (TREE_CODE (arg0) == ADDR_EXPR)
10045 tem = try_move_mult_to_index (loc, arg0,
10046 fold_convert_loc (loc,
10047 ssizetype, arg1));
10048 if (tem)
10049 return fold_convert_loc (loc, type, tem);
10052 return NULL_TREE;
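      /* Examples of the POINTER_PLUS_EXPR folds above, for a pointer
	 p and integer offsets (values illustrative):
	   (char *) 0 +p i   ->  (char *) i
	   p +p 0            ->  p
	   (p +p 4) +p 8     ->  p +p 12  */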
10054 case PLUS_EXPR:
10055 /* A + (-B) -> A - B */
10056 if (TREE_CODE (arg1) == NEGATE_EXPR)
10057 return fold_build2_loc (loc, MINUS_EXPR, type,
10058 fold_convert_loc (loc, type, arg0),
10059 fold_convert_loc (loc, type,
10060 TREE_OPERAND (arg1, 0)));
10061 /* (-A) + B -> B - A */
10062 if (TREE_CODE (arg0) == NEGATE_EXPR
10063 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10064 return fold_build2_loc (loc, MINUS_EXPR, type,
10065 fold_convert_loc (loc, type, arg1),
10066 fold_convert_loc (loc, type,
10067 TREE_OPERAND (arg0, 0)));
10069 if (INTEGRAL_TYPE_P (type))
10071 /* Convert ~A + 1 to -A. */
10072 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10073 && integer_onep (arg1))
10074 return fold_build1_loc (loc, NEGATE_EXPR, type,
10075 fold_convert_loc (loc, type,
10076 TREE_OPERAND (arg0, 0)));
10078 /* ~X + X is -1. */
10079 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10080 && !TYPE_OVERFLOW_TRAPS (type))
10082 tree tem = TREE_OPERAND (arg0, 0);
10084 STRIP_NOPS (tem);
10085 if (operand_equal_p (tem, arg1, 0))
10087 t1 = build_int_cst_type (type, -1);
10088 return omit_one_operand_loc (loc, type, t1, arg1);
10092 /* X + ~X is -1. */
10093 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10094 && !TYPE_OVERFLOW_TRAPS (type))
10096 tree tem = TREE_OPERAND (arg1, 0);
10098 STRIP_NOPS (tem);
10099 if (operand_equal_p (arg0, tem, 0))
10101 t1 = build_int_cst_type (type, -1);
10102 return omit_one_operand_loc (loc, type, t1, arg0);
10106 /* X + (X / CST) * -CST is X % CST. */
10107 if (TREE_CODE (arg1) == MULT_EXPR
10108 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10109 && operand_equal_p (arg0,
10110 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10112 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10113 tree cst1 = TREE_OPERAND (arg1, 1);
10114 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10115 cst1, cst0);
10116 if (sum && integer_zerop (sum))
10117 return fold_convert_loc (loc, type,
10118 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10119 TREE_TYPE (arg0), arg0,
10120 cst0));
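	  /* Examples of the integral identities above (illustrative,
	     non-trapping types):
	       ~a + 1            ->  -a
	       ~x + x            ->  -1
	       x + (x / 4) * -4  ->  x % 4
	     where the multiplier is the negated divisor, so the sum
	     is exactly the remainder.  */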
10124 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10125 one. Make sure the type is not saturating and has the signedness of
10126 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10127 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10128 if ((TREE_CODE (arg0) == MULT_EXPR
10129 || TREE_CODE (arg1) == MULT_EXPR)
10130 && !TYPE_SATURATING (type)
10131 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10132 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10133 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10135 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10136 if (tem)
10137 return tem;
10140 if (! FLOAT_TYPE_P (type))
10142 if (integer_zerop (arg1))
10143 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10145 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10146 with a constant, and the two constants have no bits in common,
10147 we should treat this as a BIT_IOR_EXPR since this may produce more
10148 simplifications. */
10149 if (TREE_CODE (arg0) == BIT_AND_EXPR
10150 && TREE_CODE (arg1) == BIT_AND_EXPR
10151 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10152 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10153 && integer_zerop (const_binop (BIT_AND_EXPR,
10154 TREE_OPERAND (arg0, 1),
10155 TREE_OPERAND (arg1, 1))))
10157 code = BIT_IOR_EXPR;
10158 goto bit_ior;
10161 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10162 (plus (plus (mult) (mult)) (foo)) so that we can
10163 take advantage of the factoring cases below. */
10164 if (TYPE_OVERFLOW_WRAPS (type)
10165 && (((TREE_CODE (arg0) == PLUS_EXPR
10166 || TREE_CODE (arg0) == MINUS_EXPR)
10167 && TREE_CODE (arg1) == MULT_EXPR)
10168 || ((TREE_CODE (arg1) == PLUS_EXPR
10169 || TREE_CODE (arg1) == MINUS_EXPR)
10170 && TREE_CODE (arg0) == MULT_EXPR)))
10172 tree parg0, parg1, parg, marg;
10173 enum tree_code pcode;
10175 if (TREE_CODE (arg1) == MULT_EXPR)
10176 parg = arg0, marg = arg1;
10177 else
10178 parg = arg1, marg = arg0;
10179 pcode = TREE_CODE (parg);
10180 parg0 = TREE_OPERAND (parg, 0);
10181 parg1 = TREE_OPERAND (parg, 1);
10182 STRIP_NOPS (parg0);
10183 STRIP_NOPS (parg1);
10185 if (TREE_CODE (parg0) == MULT_EXPR
10186 && TREE_CODE (parg1) != MULT_EXPR)
10187 return fold_build2_loc (loc, pcode, type,
10188 fold_build2_loc (loc, PLUS_EXPR, type,
10189 fold_convert_loc (loc, type,
10190 parg0),
10191 fold_convert_loc (loc, type,
10192 marg)),
10193 fold_convert_loc (loc, type, parg1));
10194 if (TREE_CODE (parg0) != MULT_EXPR
10195 && TREE_CODE (parg1) == MULT_EXPR)
10196 return
10197 fold_build2_loc (loc, PLUS_EXPR, type,
10198 fold_convert_loc (loc, type, parg0),
10199 fold_build2_loc (loc, pcode, type,
10200 fold_convert_loc (loc, type, marg),
10201 fold_convert_loc (loc, type,
10202 parg1)));
10205 else
10207 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10208 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10209 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10211 /* Likewise if the operands are reversed. */
10212 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10213 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10215 /* Convert X + -C into X - C. */
10216 if (TREE_CODE (arg1) == REAL_CST
10217 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10219 tem = fold_negate_const (arg1, type);
10220 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10221 return fold_build2_loc (loc, MINUS_EXPR, type,
10222 fold_convert_loc (loc, type, arg0),
10223 fold_convert_loc (loc, type, tem));
10226 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10227 to __complex__ ( x, y ). This is not the same for SNaNs or
10228 if signed zeros are involved. */
10229 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10230 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10231 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10233 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10234 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10235 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10236 bool arg0rz = false, arg0iz = false;
10237 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10238 || (arg0i && (arg0iz = real_zerop (arg0i))))
10240 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10241 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10242 if (arg0rz && arg1i && real_zerop (arg1i))
10244 tree rp = arg1r ? arg1r
10245 : build1 (REALPART_EXPR, rtype, arg1);
10246 tree ip = arg0i ? arg0i
10247 : build1 (IMAGPART_EXPR, rtype, arg0);
10248 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10250 else if (arg0iz && arg1r && real_zerop (arg1r))
10252 tree rp = arg0r ? arg0r
10253 : build1 (REALPART_EXPR, rtype, arg0);
10254 tree ip = arg1i ? arg1i
10255 : build1 (IMAGPART_EXPR, rtype, arg1);
10256 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10261 if (flag_unsafe_math_optimizations
10262 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10263 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10264 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10265 return tem;
10267 /* Convert x+x into x*2.0. */
10268 if (operand_equal_p (arg0, arg1, 0)
10269 && SCALAR_FLOAT_TYPE_P (type))
10270 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10271 build_real (type, dconst2));
10273 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10274 We associate floats only if the user has specified
10275 -fassociative-math. */
10276 if (flag_associative_math
10277 && TREE_CODE (arg1) == PLUS_EXPR
10278 && TREE_CODE (arg0) != MULT_EXPR)
10280 tree tree10 = TREE_OPERAND (arg1, 0);
10281 tree tree11 = TREE_OPERAND (arg1, 1);
10282 if (TREE_CODE (tree11) == MULT_EXPR
10283 && TREE_CODE (tree10) == MULT_EXPR)
10285 tree tree0;
10286 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10287 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10290 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10291 We associate floats only if the user has specified
10292 -fassociative-math. */
10293 if (flag_associative_math
10294 && TREE_CODE (arg0) == PLUS_EXPR
10295 && TREE_CODE (arg1) != MULT_EXPR)
10297 tree tree00 = TREE_OPERAND (arg0, 0);
10298 tree tree01 = TREE_OPERAND (arg0, 1);
10299 if (TREE_CODE (tree01) == MULT_EXPR
10300 && TREE_CODE (tree00) == MULT_EXPR)
10302 tree tree0;
10303 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10304 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10309 bit_rotate:
10310 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10311 is a rotate of A by C1 bits. */
10312 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10313 is a rotate of A by B bits. */
10315 enum tree_code code0, code1;
10316 tree rtype;
10317 code0 = TREE_CODE (arg0);
10318 code1 = TREE_CODE (arg1);
10319 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10320 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10321 && operand_equal_p (TREE_OPERAND (arg0, 0),
10322 TREE_OPERAND (arg1, 0), 0)
10323 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10324 TYPE_UNSIGNED (rtype))
10325 /* Only create rotates in complete modes. Other cases are not
10326 expanded properly. */
10327 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10329 tree tree01, tree11;
10330 enum tree_code code01, code11;
10332 tree01 = TREE_OPERAND (arg0, 1);
10333 tree11 = TREE_OPERAND (arg1, 1);
10334 STRIP_NOPS (tree01);
10335 STRIP_NOPS (tree11);
10336 code01 = TREE_CODE (tree01);
10337 code11 = TREE_CODE (tree11);
10338 if (code01 == INTEGER_CST
10339 && code11 == INTEGER_CST
10340 && TREE_INT_CST_HIGH (tree01) == 0
10341 && TREE_INT_CST_HIGH (tree11) == 0
10342 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10343 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10345 tem = build2_loc (loc, LROTATE_EXPR,
10346 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10347 TREE_OPERAND (arg0, 0),
10348 code0 == LSHIFT_EXPR ? tree01 : tree11);
10349 return fold_convert_loc (loc, type, tem);
10351 else if (code11 == MINUS_EXPR)
10353 tree tree110, tree111;
10354 tree110 = TREE_OPERAND (tree11, 0);
10355 tree111 = TREE_OPERAND (tree11, 1);
10356 STRIP_NOPS (tree110);
10357 STRIP_NOPS (tree111);
10358 if (TREE_CODE (tree110) == INTEGER_CST
10359 && 0 == compare_tree_int (tree110,
10360 TYPE_PRECISION
10361 (TREE_TYPE (TREE_OPERAND
10362 (arg0, 0))))
10363 && operand_equal_p (tree01, tree111, 0))
10364 return
10365 fold_convert_loc (loc, type,
10366 build2 ((code0 == LSHIFT_EXPR
10367 ? LROTATE_EXPR
10368 : RROTATE_EXPR),
10369 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10370 TREE_OPERAND (arg0, 0), tree01));
10372 else if (code01 == MINUS_EXPR)
10374 tree tree010, tree011;
10375 tree010 = TREE_OPERAND (tree01, 0);
10376 tree011 = TREE_OPERAND (tree01, 1);
10377 STRIP_NOPS (tree010);
10378 STRIP_NOPS (tree011);
10379 if (TREE_CODE (tree010) == INTEGER_CST
10380 && 0 == compare_tree_int (tree010,
10381 TYPE_PRECISION
10382 (TREE_TYPE (TREE_OPERAND
10383 (arg0, 0))))
10384 && operand_equal_p (tree11, tree011, 0))
10385 return fold_convert_loc
10386 (loc, type,
10387 build2 ((code0 != LSHIFT_EXPR
10388 ? LROTATE_EXPR
10389 : RROTATE_EXPR),
10390 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10391 TREE_OPERAND (arg0, 0), tree11));
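      /* E.g. for a 32-bit unsigned x (the type precision must equal
	 the mode precision):
	   (x << 3) + (x >> 29)        ->  x rotated left by 3
	   (x << n) + (x >> (32 - n))  ->  x rotated left by n  */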
10396 associate:
10397 /* In most languages, we can't associate operations on floats through
10398 parentheses. Rather than remember where the parentheses were, we
10399 don't associate floats at all, unless the user has specified
10400 -fassociative-math.
10401 And, we need to make sure type is not saturating. */
10403 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10404 && !TYPE_SATURATING (type))
10406 tree var0, con0, lit0, minus_lit0;
10407 tree var1, con1, lit1, minus_lit1;
10408 tree atype = type;
10409 bool ok = true;
10411 /* Split both trees into variables, constants, and literals. Then
10412 associate each group together, the constants with literals,
10413 then the result with variables. This increases the chances of
10414 literals being recombined later and of generating relocatable
10415 expressions for the sum of a constant and literal. */
10416 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10417 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10418 code == MINUS_EXPR);
10420 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10421 if (code == MINUS_EXPR)
10422 code = PLUS_EXPR;
10424 /* With undefined overflow prefer doing association in a type
10425 which wraps on overflow, if that is one of the operand types. */
10426 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10427 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10429 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10430 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10431 atype = TREE_TYPE (arg0);
10432 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10433 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10434 atype = TREE_TYPE (arg1);
10435 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10438 /* With undefined overflow we can only associate constants with one
10439 variable, and constants whose association doesn't overflow. */
10440 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10441 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10443 if (var0 && var1)
10445 tree tmp0 = var0;
10446 tree tmp1 = var1;
10448 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10449 tmp0 = TREE_OPERAND (tmp0, 0);
10450 if (CONVERT_EXPR_P (tmp0)
10451 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10452 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10453 <= TYPE_PRECISION (atype)))
10454 tmp0 = TREE_OPERAND (tmp0, 0);
10455 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10456 tmp1 = TREE_OPERAND (tmp1, 0);
10457 if (CONVERT_EXPR_P (tmp1)
10458 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10459 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10460 <= TYPE_PRECISION (atype)))
10461 tmp1 = TREE_OPERAND (tmp1, 0);
10462 /* The only case we can still associate with two variables
10463 is if they are the same, modulo negation and bit-pattern
10464 preserving conversions. */
10465 if (!operand_equal_p (tmp0, tmp1, 0))
10466 ok = false;
10470 /* Only do something if we found more than two objects. Otherwise,
10471 nothing has changed and we risk infinite recursion. */
10472 if (ok
10473 && (2 < ((var0 != 0) + (var1 != 0)
10474 + (con0 != 0) + (con1 != 0)
10475 + (lit0 != 0) + (lit1 != 0)
10476 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10478 bool any_overflows = false;
10479 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10480 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10481 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10482 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10483 var0 = associate_trees (loc, var0, var1, code, atype);
10484 con0 = associate_trees (loc, con0, con1, code, atype);
10485 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10486 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10487 code, atype);
10489 /* Preserve the MINUS_EXPR if the negative part of the literal is
10490 greater than the positive part. Otherwise, the multiplicative
10491 folding code (i.e. extract_muldiv) may be fooled when
10492 unsigned constants are subtracted, as in the following
10493 example: ((X*2 + 4) - 8U)/2. */
10494 if (minus_lit0 && lit0)
10496 if (TREE_CODE (lit0) == INTEGER_CST
10497 && TREE_CODE (minus_lit0) == INTEGER_CST
10498 && tree_int_cst_lt (lit0, minus_lit0))
10500 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10501 MINUS_EXPR, atype);
10502 lit0 = 0;
10504 else
10506 lit0 = associate_trees (loc, lit0, minus_lit0,
10507 MINUS_EXPR, atype);
10508 minus_lit0 = 0;
10512 /* Don't introduce overflows through reassociation. */
10513 if (!any_overflows
10514 && ((lit0 && TREE_OVERFLOW (lit0))
10515 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10516 return NULL_TREE;
10518 if (minus_lit0)
10520 if (con0 == 0)
10521 return
10522 fold_convert_loc (loc, type,
10523 associate_trees (loc, var0, minus_lit0,
10524 MINUS_EXPR, atype));
10525 else
10527 con0 = associate_trees (loc, con0, minus_lit0,
10528 MINUS_EXPR, atype);
10529 return
10530 fold_convert_loc (loc, type,
10531 associate_trees (loc, var0, con0,
10532 PLUS_EXPR, atype));
10536 con0 = associate_trees (loc, con0, lit0, code, atype);
10537 return
10538 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10539 code, atype));
10543 return NULL_TREE;
10545 case MINUS_EXPR:
10546 /* Pointer simplifications for subtraction, simple reassociations. */
10547 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10549 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10550 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10551 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10553 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10554 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10555 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10556 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10557 return fold_build2_loc (loc, PLUS_EXPR, type,
10558 fold_build2_loc (loc, MINUS_EXPR, type,
10559 arg00, arg10),
10560 fold_build2_loc (loc, MINUS_EXPR, type,
10561 arg01, arg11));
10563 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10564 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10566 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10567 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10568 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10569 fold_convert_loc (loc, type, arg1));
10570 if (tmp)
10571 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10574 /* A - (-B) -> A + B */
10575 if (TREE_CODE (arg1) == NEGATE_EXPR)
10576 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10577 fold_convert_loc (loc, type,
10578 TREE_OPERAND (arg1, 0)));
10579 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10580 if (TREE_CODE (arg0) == NEGATE_EXPR
10581 && (FLOAT_TYPE_P (type)
10582 || INTEGRAL_TYPE_P (type))
10583 && negate_expr_p (arg1)
10584 && reorder_operands_p (arg0, arg1))
10585 return fold_build2_loc (loc, MINUS_EXPR, type,
10586 fold_convert_loc (loc, type,
10587 negate_expr (arg1)),
10588 fold_convert_loc (loc, type,
10589 TREE_OPERAND (arg0, 0)));
10590 /* Convert -A - 1 to ~A. */
10591 if (INTEGRAL_TYPE_P (type)
10592 && TREE_CODE (arg0) == NEGATE_EXPR
10593 && integer_onep (arg1)
10594 && !TYPE_OVERFLOW_TRAPS (type))
10595 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10596 fold_convert_loc (loc, type,
10597 TREE_OPERAND (arg0, 0)));
10599 /* Convert -1 - A to ~A. */
10600 if (INTEGRAL_TYPE_P (type)
10601 && integer_all_onesp (arg0))
10602 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10605 /* X - (X / CST) * CST is X % CST. */
10606 if (INTEGRAL_TYPE_P (type)
10607 && TREE_CODE (arg1) == MULT_EXPR
10608 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10609 && operand_equal_p (arg0,
10610 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10611 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10612 TREE_OPERAND (arg1, 1), 0))
10613 return
10614 fold_convert_loc (loc, type,
10615 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10616 arg0, TREE_OPERAND (arg1, 1)));
10618 if (! FLOAT_TYPE_P (type))
10620 if (integer_zerop (arg0))
10621 return negate_expr (fold_convert_loc (loc, type, arg1));
10622 if (integer_zerop (arg1))
10623 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10625 /* Fold A - (A & B) into ~B & A. */
10626 if (!TREE_SIDE_EFFECTS (arg0)
10627 && TREE_CODE (arg1) == BIT_AND_EXPR)
10629 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10631 tree arg10 = fold_convert_loc (loc, type,
10632 TREE_OPERAND (arg1, 0));
10633 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10634 fold_build1_loc (loc, BIT_NOT_EXPR,
10635 type, arg10),
10636 fold_convert_loc (loc, type, arg0));
10638 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10640 tree arg11 = fold_convert_loc (loc,
10641 type, TREE_OPERAND (arg1, 1));
10642 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10643 fold_build1_loc (loc, BIT_NOT_EXPR,
10644 type, arg11),
10645 fold_convert_loc (loc, type, arg0));
10649 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10650 any power of 2 minus 1. */
10651 if (TREE_CODE (arg0) == BIT_AND_EXPR
10652 && TREE_CODE (arg1) == BIT_AND_EXPR
10653 && operand_equal_p (TREE_OPERAND (arg0, 0),
10654 TREE_OPERAND (arg1, 0), 0))
10656 tree mask0 = TREE_OPERAND (arg0, 1);
10657 tree mask1 = TREE_OPERAND (arg1, 1);
10658 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10660 if (operand_equal_p (tem, mask1, 0))
10662 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10663 TREE_OPERAND (arg0, 0), mask1);
10664 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
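/* Worked example with A == 45 (0b101101) and B == 7:
   (45 & ~7) - (45 & 7) == 40 - 5 == 35, and likewise
   (45 ^ 7) - 7 == 42 - 7 == 35. */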
10669 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10670 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10671 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10673 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10674 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10675 (-ARG1 + ARG0) reduces to -ARG1. */
10676 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10677 return negate_expr (fold_convert_loc (loc, type, arg1));
10679 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10680 __complex__ ( x, -y ). This is not the same for SNaNs or if
10681 signed zeros are involved. */
10682 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10683 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10684 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10686 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10687 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10688 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10689 bool arg0rz = false, arg0iz = false;
10690 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10691 || (arg0i && (arg0iz = real_zerop (arg0i))))
10693 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10694 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10695 if (arg0rz && arg1i && real_zerop (arg1i))
10697 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10698 arg1r ? arg1r
10699 : build1 (REALPART_EXPR, rtype, arg1));
10700 tree ip = arg0i ? arg0i
10701 : build1 (IMAGPART_EXPR, rtype, arg0);
10702 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10704 else if (arg0iz && arg1r && real_zerop (arg1r))
10706 tree rp = arg0r ? arg0r
10707 : build1 (REALPART_EXPR, rtype, arg0);
10708 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10709 arg1i ? arg1i
10710 : build1 (IMAGPART_EXPR, rtype, arg1));
10711 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10716 /* Fold &x - &x. This can happen from &x.foo - &x.
10717 This is unsafe for certain floats even in non-IEEE formats.
10718 In IEEE, it is unsafe because it does wrong for NaNs.
10719 Also note that operand_equal_p is always false if an operand
10720 is volatile. */
10722 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10723 && operand_equal_p (arg0, arg1, 0))
10724 return build_zero_cst (type);
10726 /* A - B -> A + (-B) if B is easily negatable. */
10727 if (negate_expr_p (arg1)
10728 && ((FLOAT_TYPE_P (type)
10729 /* Avoid this transformation if B is a positive REAL_CST. */
10730 && (TREE_CODE (arg1) != REAL_CST
10731 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10732 || INTEGRAL_TYPE_P (type)))
10733 return fold_build2_loc (loc, PLUS_EXPR, type,
10734 fold_convert_loc (loc, type, arg0),
10735 fold_convert_loc (loc, type,
10736 negate_expr (arg1)));
10738 /* Try folding difference of addresses. */
10740 HOST_WIDE_INT diff;
10742 if ((TREE_CODE (arg0) == ADDR_EXPR
10743 || TREE_CODE (arg1) == ADDR_EXPR)
10744 && ptr_difference_const (arg0, arg1, &diff))
10745 return build_int_cst_type (type, diff);
10748 /* Fold &a[i] - &a[j] to i-j. */
10749 if (TREE_CODE (arg0) == ADDR_EXPR
10750 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10751 && TREE_CODE (arg1) == ADDR_EXPR
10752 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10754 tree tem = fold_addr_of_array_ref_difference (loc, type,
10755 TREE_OPERAND (arg0, 0),
10756 TREE_OPERAND (arg1, 0));
10757 if (tem)
10758 return tem;
10761 if (FLOAT_TYPE_P (type)
10762 && flag_unsafe_math_optimizations
10763 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10764 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10765 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10766 return tem;
10768 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same,
10769 or one of them being 1. Make sure the type is not saturating and has the signedness of
10770 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10771 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10772 if ((TREE_CODE (arg0) == MULT_EXPR
10773 || TREE_CODE (arg1) == MULT_EXPR)
10774 && !TYPE_SATURATING (type)
10775 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10776 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10777 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10779 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10780 if (tem)
10781 return tem;
10784 goto associate;
10786 case MULT_EXPR:
10787 /* (-A) * (-B) -> A * B */
10788 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10789 return fold_build2_loc (loc, MULT_EXPR, type,
10790 fold_convert_loc (loc, type,
10791 TREE_OPERAND (arg0, 0)),
10792 fold_convert_loc (loc, type,
10793 negate_expr (arg1)));
10794 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10795 return fold_build2_loc (loc, MULT_EXPR, type,
10796 fold_convert_loc (loc, type,
10797 negate_expr (arg0)),
10798 fold_convert_loc (loc, type,
10799 TREE_OPERAND (arg1, 0)));
10801 if (! FLOAT_TYPE_P (type))
10803 if (integer_zerop (arg1))
10804 return omit_one_operand_loc (loc, type, arg1, arg0);
10805 if (integer_onep (arg1))
10806 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10807 /* Transform x * -1 into -x. Make sure to do the negation
10808 on the original operand with conversions not stripped
10809 because we can only strip non-sign-changing conversions. */
10810 if (integer_all_onesp (arg1))
10811 return fold_convert_loc (loc, type, negate_expr (op0));
10812 /* Transform x * -C into -x * C if x is easily negatable. */
10813 if (TREE_CODE (arg1) == INTEGER_CST
10814 && tree_int_cst_sgn (arg1) == -1
10815 && negate_expr_p (arg0)
10816 && (tem = negate_expr (arg1)) != arg1
10817 && !TREE_OVERFLOW (tem))
10818 return fold_build2_loc (loc, MULT_EXPR, type,
10819 fold_convert_loc (loc, type,
10820 negate_expr (arg0)),
10821 tem);
10823 /* (a * (1 << b)) is (a << b) */
10824 if (TREE_CODE (arg1) == LSHIFT_EXPR
10825 && integer_onep (TREE_OPERAND (arg1, 0)))
10826 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10827 TREE_OPERAND (arg1, 1));
10828 if (TREE_CODE (arg0) == LSHIFT_EXPR
10829 && integer_onep (TREE_OPERAND (arg0, 0)))
10830 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10831 TREE_OPERAND (arg0, 1));
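/* E.g. a * (1 << 3) becomes a << 3, and (1 << 3) * a likewise
   becomes a << 3, saving a multiply. */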
10833 /* (A + A) * C -> A * 2 * C */
10834 if (TREE_CODE (arg0) == PLUS_EXPR
10835 && TREE_CODE (arg1) == INTEGER_CST
10836 && operand_equal_p (TREE_OPERAND (arg0, 0),
10837 TREE_OPERAND (arg0, 1), 0))
10838 return fold_build2_loc (loc, MULT_EXPR, type,
10839 omit_one_operand_loc (loc, type,
10840 TREE_OPERAND (arg0, 0),
10841 TREE_OPERAND (arg0, 1)),
10842 fold_build2_loc (loc, MULT_EXPR, type,
10843 build_int_cst (type, 2), arg1));
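/* E.g. (a + a) * 5 becomes a * 10; the constants 2 and 5 are
   multiplied at compile time by the recursive fold. */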
10845 strict_overflow_p = false;
10846 if (TREE_CODE (arg1) == INTEGER_CST
10847 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10848 &strict_overflow_p)))
10850 if (strict_overflow_p)
10851 fold_overflow_warning (("assuming signed overflow does not "
10852 "occur when simplifying "
10853 "multiplication"),
10854 WARN_STRICT_OVERFLOW_MISC);
10855 return fold_convert_loc (loc, type, tem);
10858 /* Optimize z * conj(z) for integer complex numbers. */
10859 if (TREE_CODE (arg0) == CONJ_EXPR
10860 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10861 return fold_mult_zconjz (loc, type, arg1);
10862 if (TREE_CODE (arg1) == CONJ_EXPR
10863 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10864 return fold_mult_zconjz (loc, type, arg0);
10866 else
10868 /* Maybe fold x * 0 to 0. The expressions aren't the same
10869 when x is NaN, since x * 0 is also NaN. Nor are they the
10870 same in modes with signed zeros, since multiplying a
10871 negative value by 0 gives -0, not +0. */
10872 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10873 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10874 && real_zerop (arg1))
10875 return omit_one_operand_loc (loc, type, arg1, arg0);
10876 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10877 Likewise for complex arithmetic with signed zeros. */
10878 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10879 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10880 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10881 && real_onep (arg1))
10882 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10884 /* Transform x * -1.0 into -x. */
10885 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10886 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10887 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10888 && real_minus_onep (arg1))
10889 return fold_convert_loc (loc, type, negate_expr (arg0));
10891 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10892 the result for floating point types due to rounding, so it is applied
10893 only if -fassociative-math was specified. */
10894 if (flag_associative_math
10895 && TREE_CODE (arg0) == RDIV_EXPR
10896 && TREE_CODE (arg1) == REAL_CST
10897 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10899 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10900 arg1);
10901 if (tem)
10902 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10903 TREE_OPERAND (arg0, 1));
10906 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10907 if (operand_equal_p (arg0, arg1, 0))
10909 tree tem = fold_strip_sign_ops (arg0);
10910 if (tem != NULL_TREE)
10912 tem = fold_convert_loc (loc, type, tem);
10913 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10917 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10918 This is not the same for NaNs or if signed zeros are
10919 involved. */
10920 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10921 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10922 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10923 && TREE_CODE (arg1) == COMPLEX_CST
10924 && real_zerop (TREE_REALPART (arg1)))
10926 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10927 if (real_onep (TREE_IMAGPART (arg1)))
10928 return
10929 fold_build2_loc (loc, COMPLEX_EXPR, type,
10930 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10931 rtype, arg0)),
10932 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10933 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10934 return
10935 fold_build2_loc (loc, COMPLEX_EXPR, type,
10936 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10937 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10938 rtype, arg0)));
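/* These are the identities (x + y*I) * I == -y + x*I and
   (x + y*I) * -I == y - x*I. */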
10941 /* Optimize z * conj(z) for floating point complex numbers.
10942 Guarded by flag_unsafe_math_optimizations as non-finite
10943 imaginary components don't produce scalar results. */
10944 if (flag_unsafe_math_optimizations
10945 && TREE_CODE (arg0) == CONJ_EXPR
10946 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10947 return fold_mult_zconjz (loc, type, arg1);
10948 if (flag_unsafe_math_optimizations
10949 && TREE_CODE (arg1) == CONJ_EXPR
10950 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10951 return fold_mult_zconjz (loc, type, arg0);
10953 if (flag_unsafe_math_optimizations)
10955 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10956 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10958 /* Optimizations of root(...)*root(...). */
10959 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10961 tree rootfn, arg;
10962 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10963 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10965 /* Optimize sqrt(x)*sqrt(x) as x. */
10966 if (BUILTIN_SQRT_P (fcode0)
10967 && operand_equal_p (arg00, arg10, 0)
10968 && ! HONOR_SNANS (TYPE_MODE (type)))
10969 return arg00;
10971 /* Optimize root(x)*root(y) as root(x*y). */
10972 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10973 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10974 return build_call_expr_loc (loc, rootfn, 1, arg);
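/* E.g. sqrt(a) * sqrt(b) becomes sqrt(a*b). This is only valid
   under -funsafe-math-optimizations: for a == b == -1.0 the
   original is NaN * NaN == NaN, but the folded form is
   sqrt(1.0) == 1.0. */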
10977 /* Optimize expN(x)*expN(y) as expN(x+y). */
10978 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10980 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10981 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10982 CALL_EXPR_ARG (arg0, 0),
10983 CALL_EXPR_ARG (arg1, 0));
10984 return build_call_expr_loc (loc, expfn, 1, arg);
10987 /* Optimizations of pow(...)*pow(...). */
10988 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10989 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10990 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10992 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10993 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10994 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10995 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10997 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10998 if (operand_equal_p (arg01, arg11, 0))
11000 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11001 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11002 arg00, arg10);
11003 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11006 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11007 if (operand_equal_p (arg00, arg10, 0))
11009 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11010 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11011 arg01, arg11);
11012 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
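/* E.g. pow(a, 2.5) * pow(a, 0.5) becomes pow(a, 3.0), and
   pow(a, c) * pow(b, c) becomes pow(a*b, c). */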
11016 /* Optimize tan(x)*cos(x) as sin(x). */
11017 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11018 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11019 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11020 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11021 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11022 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11023 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11024 CALL_EXPR_ARG (arg1, 0), 0))
11026 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11028 if (sinfn != NULL_TREE)
11029 return build_call_expr_loc (loc, sinfn, 1,
11030 CALL_EXPR_ARG (arg0, 0));
11033 /* Optimize x*pow(x,c) as pow(x,c+1). */
11034 if (fcode1 == BUILT_IN_POW
11035 || fcode1 == BUILT_IN_POWF
11036 || fcode1 == BUILT_IN_POWL)
11038 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11039 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11040 if (TREE_CODE (arg11) == REAL_CST
11041 && !TREE_OVERFLOW (arg11)
11042 && operand_equal_p (arg0, arg10, 0))
11044 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11045 REAL_VALUE_TYPE c;
11046 tree arg;
11048 c = TREE_REAL_CST (arg11);
11049 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11050 arg = build_real (type, c);
11051 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11055 /* Optimize pow(x,c)*x as pow(x,c+1). */
11056 if (fcode0 == BUILT_IN_POW
11057 || fcode0 == BUILT_IN_POWF
11058 || fcode0 == BUILT_IN_POWL)
11060 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11061 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11062 if (TREE_CODE (arg01) == REAL_CST
11063 && !TREE_OVERFLOW (arg01)
11064 && operand_equal_p (arg1, arg00, 0))
11066 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11067 REAL_VALUE_TYPE c;
11068 tree arg;
11070 c = TREE_REAL_CST (arg01);
11071 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11072 arg = build_real (type, c);
11073 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11077 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11078 if (!in_gimple_form
11079 && optimize
11080 && operand_equal_p (arg0, arg1, 0))
11082 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11084 if (powfn)
11086 tree arg = build_real (type, dconst2);
11087 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11092 goto associate;
11094 case BIT_IOR_EXPR:
11095 bit_ior:
11096 if (integer_all_onesp (arg1))
11097 return omit_one_operand_loc (loc, type, arg1, arg0);
11098 if (integer_zerop (arg1))
11099 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11100 if (operand_equal_p (arg0, arg1, 0))
11101 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11103 /* ~X | X is -1. */
11104 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11105 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11107 t1 = build_zero_cst (type);
11108 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11109 return omit_one_operand_loc (loc, type, t1, arg1);
11112 /* X | ~X is -1. */
11113 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11114 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11116 t1 = build_zero_cst (type);
11117 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11118 return omit_one_operand_loc (loc, type, t1, arg0);
11121 /* Canonicalize (X & C1) | C2. */
11122 if (TREE_CODE (arg0) == BIT_AND_EXPR
11123 && TREE_CODE (arg1) == INTEGER_CST
11124 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11126 double_int c1, c2, c3, msk;
11127 int width = TYPE_PRECISION (type), w;
11128 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11129 c2 = tree_to_double_int (arg1);
11131 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11132 if ((c1 & c2) == c1)
11133 return omit_one_operand_loc (loc, type, arg1,
11134 TREE_OPERAND (arg0, 0));
11136 msk = double_int::mask (width);
11138 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11139 if (msk.and_not (c1 | c2).is_zero ())
11140 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11141 TREE_OPERAND (arg0, 0), arg1);
11143 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11144 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11145 mode which allows further optimizations. */
11146 c1 &= msk;
11147 c2 &= msk;
11148 c3 = c1.and_not (c2);
11149 for (w = BITS_PER_UNIT;
11150 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11151 w <<= 1)
11153 unsigned HOST_WIDE_INT mask
11154 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11155 if (((c1.low | c2.low) & mask) == mask
11156 && (c1.low & ~mask) == 0 && c1.high == 0)
11158 c3 = double_int::from_uhwi (mask);
11159 break;
11162 if (c3 != c1)
11163 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11164 fold_build2_loc (loc, BIT_AND_EXPR, type,
11165 TREE_OPERAND (arg0, 0),
11166 double_int_to_tree (type,
11167 c3)),
11168 arg1);
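/* Worked example: (x & 0xf0) | 0x3c. Here C3 == C1 & ~C2 == 0xc0,
   so the result is (x & 0xc0) | 0x3c: the bits 0x30 are forced to 1
   by C2 and need not be kept in the AND mask. */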
11171 /* (X & Y) | Y is (X, Y). */
11172 if (TREE_CODE (arg0) == BIT_AND_EXPR
11173 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11174 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11175 /* (X & Y) | X is (Y, X). */
11176 if (TREE_CODE (arg0) == BIT_AND_EXPR
11177 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11178 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11179 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11180 /* X | (X & Y) is (Y, X). */
11181 if (TREE_CODE (arg1) == BIT_AND_EXPR
11182 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11183 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11184 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11185 /* X | (Y & X) is (Y, X). */
11186 if (TREE_CODE (arg1) == BIT_AND_EXPR
11187 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11188 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11189 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11191 /* (X & ~Y) | (~X & Y) is X ^ Y */
11192 if (TREE_CODE (arg0) == BIT_AND_EXPR
11193 && TREE_CODE (arg1) == BIT_AND_EXPR)
11195 tree a0, a1, l0, l1, n0, n1;
11197 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11198 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11200 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11201 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11203 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11204 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11206 if ((operand_equal_p (n0, a0, 0)
11207 && operand_equal_p (n1, a1, 0))
11208 || (operand_equal_p (n0, a1, 0)
11209 && operand_equal_p (n1, a0, 0)))
11210 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11213 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11214 if (t1 != NULL_TREE)
11215 return t1;
11217 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11219 This results in more efficient code for machines without a NAND
11220 instruction. Combine will canonicalize to the first form
11221 which will allow use of NAND instructions provided by the
11222 backend if they exist. */
11223 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11224 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11226 return
11227 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11228 build2 (BIT_AND_EXPR, type,
11229 fold_convert_loc (loc, type,
11230 TREE_OPERAND (arg0, 0)),
11231 fold_convert_loc (loc, type,
11232 TREE_OPERAND (arg1, 0))));
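/* This is De Morgan's law: e.g. ~a | ~b becomes ~(a & b),
   replacing two NOTs and an OR by an AND and a single NOT. */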
11235 /* See if this can be simplified into a rotate first. If that
11236 is unsuccessful continue in the association code. */
11237 goto bit_rotate;
11239 case BIT_XOR_EXPR:
11240 if (integer_zerop (arg1))
11241 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11242 if (integer_all_onesp (arg1))
11243 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11244 if (operand_equal_p (arg0, arg1, 0))
11245 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11247 /* ~X ^ X is -1. */
11248 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11249 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11251 t1 = build_zero_cst (type);
11252 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11253 return omit_one_operand_loc (loc, type, t1, arg1);
11256 /* X ^ ~X is -1. */
11257 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11258 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11260 t1 = build_zero_cst (type);
11261 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11262 return omit_one_operand_loc (loc, type, t1, arg0);
11265 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11266 with a constant, and the two constants have no bits in common,
11267 we should treat this as a BIT_IOR_EXPR since this may produce more
11268 simplifications. */
11269 if (TREE_CODE (arg0) == BIT_AND_EXPR
11270 && TREE_CODE (arg1) == BIT_AND_EXPR
11271 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11272 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11273 && integer_zerop (const_binop (BIT_AND_EXPR,
11274 TREE_OPERAND (arg0, 1),
11275 TREE_OPERAND (arg1, 1))))
11277 code = BIT_IOR_EXPR;
11278 goto bit_ior;
11281 /* (X | Y) ^ X -> Y & ~X */
11282 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11283 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11285 tree t2 = TREE_OPERAND (arg0, 1);
11286 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11287 arg1);
11288 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11289 fold_convert_loc (loc, type, t2),
11290 fold_convert_loc (loc, type, t1));
11291 return t1;
11294 /* (Y | X) ^ X -> Y & ~X */
11295 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11296 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11298 tree t2 = TREE_OPERAND (arg0, 0);
11299 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11300 arg1);
11301 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11302 fold_convert_loc (loc, type, t2),
11303 fold_convert_loc (loc, type, t1));
11304 return t1;
11307 /* X ^ (X | Y) -> Y & ~X */
11308 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11309 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11311 tree t2 = TREE_OPERAND (arg1, 1);
11312 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11313 arg0);
11314 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11315 fold_convert_loc (loc, type, t2),
11316 fold_convert_loc (loc, type, t1));
11317 return t1;
11320 /* X ^ (Y | X) -> Y & ~X */
11321 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11322 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11324 tree t2 = TREE_OPERAND (arg1, 0);
11325 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11326 arg0);
11327 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11328 fold_convert_loc (loc, type, t2),
11329 fold_convert_loc (loc, type, t1));
11330 return t1;
11333 /* Convert ~X ^ ~Y to X ^ Y. */
11334 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11335 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11336 return fold_build2_loc (loc, code, type,
11337 fold_convert_loc (loc, type,
11338 TREE_OPERAND (arg0, 0)),
11339 fold_convert_loc (loc, type,
11340 TREE_OPERAND (arg1, 0)));
11342 /* Convert ~X ^ C to X ^ ~C. */
11343 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11344 && TREE_CODE (arg1) == INTEGER_CST)
11345 return fold_build2_loc (loc, code, type,
11346 fold_convert_loc (loc, type,
11347 TREE_OPERAND (arg0, 0)),
11348 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11350 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11351 if (TREE_CODE (arg0) == BIT_AND_EXPR
11352 && integer_onep (TREE_OPERAND (arg0, 1))
11353 && integer_onep (arg1))
11354 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11355 build_zero_cst (TREE_TYPE (arg0)));
11357 /* Fold (X & Y) ^ Y as ~X & Y. */
11358 if (TREE_CODE (arg0) == BIT_AND_EXPR
11359 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11361 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11362 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11363 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11364 fold_convert_loc (loc, type, arg1));
11366 /* Fold (X & Y) ^ X as ~Y & X. */
11367 if (TREE_CODE (arg0) == BIT_AND_EXPR
11368 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11369 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11371 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11372 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11373 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11374 fold_convert_loc (loc, type, arg1));
11376 /* Fold X ^ (X & Y) as X & ~Y. */
11377 if (TREE_CODE (arg1) == BIT_AND_EXPR
11378 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11380 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11381 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11382 fold_convert_loc (loc, type, arg0),
11383 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11385 /* Fold X ^ (Y & X) as ~Y & X. */
11386 if (TREE_CODE (arg1) == BIT_AND_EXPR
11387 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11388 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11390 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11391 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11392 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11393 fold_convert_loc (loc, type, arg0));
11396 /* See if this can be simplified into a rotate first. If that
11397 is unsuccessful continue in the association code. */
11398 goto bit_rotate;
11400 case BIT_AND_EXPR:
11401 if (integer_all_onesp (arg1))
11402 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11403 if (integer_zerop (arg1))
11404 return omit_one_operand_loc (loc, type, arg1, arg0);
11405 if (operand_equal_p (arg0, arg1, 0))
11406 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11408 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11409 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11410 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11411 || (TREE_CODE (arg0) == EQ_EXPR
11412 && integer_zerop (TREE_OPERAND (arg0, 1))))
11413 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11414 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11416 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11417 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11418 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11419 || (TREE_CODE (arg1) == EQ_EXPR
11420 && integer_zerop (TREE_OPERAND (arg1, 1))))
11421 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11422 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11424 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11425 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11426 && TREE_CODE (arg1) == INTEGER_CST
11427 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11429 tree tmp1 = fold_convert_loc (loc, type, arg1);
11430 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11431 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11432 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11433 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11434 return
11435 fold_convert_loc (loc, type,
11436 fold_build2_loc (loc, BIT_IOR_EXPR,
11437 type, tmp2, tmp3));
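/* This is AND distributing over OR: e.g. (x | 0x0f) & 0x3c becomes
   (x & 0x3c) | 0x0c, since 0x0f & 0x3c == 0x0c. */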
11440 /* (X | Y) & Y is (X, Y). */
11441 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11442 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11443 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11444 /* (X | Y) & X is (Y, X). */
11445 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11446 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11447 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11448 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11449 /* X & (X | Y) is (Y, X). */
11450 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11451 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11452 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11453 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11454 /* X & (Y | X) is (Y, X). */
11455 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11456 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11457 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11458 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11460 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11461 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11462 && integer_onep (TREE_OPERAND (arg0, 1))
11463 && integer_onep (arg1))
11465 tree tem2;
11466 tem = TREE_OPERAND (arg0, 0);
11467 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11468 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11469 tem, tem2);
11470 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11471 build_zero_cst (TREE_TYPE (tem)));
11473 /* Fold ~X & 1 as (X & 1) == 0. */
11474 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11475 && integer_onep (arg1))
11477 tree tem2;
11478 tem = TREE_OPERAND (arg0, 0);
11479 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11480 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11481 tem, tem2);
11482 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11483 build_zero_cst (TREE_TYPE (tem)));
11485 /* Fold !X & 1 as X == 0. */
11486 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11487 && integer_onep (arg1))
11489 tem = TREE_OPERAND (arg0, 0);
11490 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11491 build_zero_cst (TREE_TYPE (tem)));
11494 /* Fold (X ^ Y) & Y as ~X & Y. */
11495 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11496 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11498 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11499 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11500 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11501 fold_convert_loc (loc, type, arg1));
11503 /* Fold (X ^ Y) & X as ~Y & X. */
11504 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11505 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11506 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11508 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11509 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11510 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11511 fold_convert_loc (loc, type, arg1));
11513 /* Fold X & (X ^ Y) as X & ~Y. */
11514 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11515 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11517 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11518 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11519 fold_convert_loc (loc, type, arg0),
11520 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11522 /* Fold X & (Y ^ X) as ~Y & X. */
11523 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11524 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11525 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11527 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11528 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11529 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11530 fold_convert_loc (loc, type, arg0));
11533 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11534 multiple of 1 << CST. */
11535 if (TREE_CODE (arg1) == INTEGER_CST)
11537 double_int cst1 = tree_to_double_int (arg1);
11538 double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11539 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11540 if ((cst1 & ncst1) == ncst1
11541 && multiple_of_p (type, arg0,
11542 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11543 return fold_convert_loc (loc, type, arg0);
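/* E.g. (x * 12) & -4 folds to x * 12: the mask -4 only clears the
   low two bits, and x * 12 is always a multiple of 4, so those bits
   are already zero. */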
11546 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11547 bits from CST2. */
11548 if (TREE_CODE (arg1) == INTEGER_CST
11549 && TREE_CODE (arg0) == MULT_EXPR
11550 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11552 int arg1tz
11553 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11554 if (arg1tz > 0)
11556 double_int arg1mask, masked;
11557 arg1mask = ~double_int::mask (arg1tz);
11558 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11559 TYPE_UNSIGNED (type));
11560 masked = arg1mask & tree_to_double_int (arg1);
11561 if (masked.is_zero ())
11562 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11563 arg0, arg1);
11564 else if (masked != tree_to_double_int (arg1))
11565 return fold_build2_loc (loc, code, type, op0,
11566 double_int_to_tree (type, masked));
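/* E.g. x * 8 has its low three bits known zero, so (x * 8) & 7
   folds to 0, and (x * 8) & 0xff becomes (x * 8) & 0xf8. */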
11570 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11571 ((A & N) + B) & M -> (A + B) & M
11572 Similarly if (N & M) == 0,
11573 ((A | N) + B) & M -> (A + B) & M
11574 and for - instead of + (or unary - instead of +)
11575 and/or ^ instead of |.
11576 If B is constant and (B & M) == 0, fold into A & M. */
11577 if (host_integerp (arg1, 1))
11579 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11580 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11581 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11582 && (TREE_CODE (arg0) == PLUS_EXPR
11583 || TREE_CODE (arg0) == MINUS_EXPR
11584 || TREE_CODE (arg0) == NEGATE_EXPR)
11585 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11586 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11588 tree pmop[2];
11589 int which = 0;
11590 unsigned HOST_WIDE_INT cst0;
11592 /* Now we know that arg0 is (C + D) or (C - D) or
11593 -C and arg1 (M) is == (1LL << cst) - 1.
11594 Store C into PMOP[0] and D into PMOP[1]. */
11595 pmop[0] = TREE_OPERAND (arg0, 0);
11596 pmop[1] = NULL;
11597 if (TREE_CODE (arg0) != NEGATE_EXPR)
11599 pmop[1] = TREE_OPERAND (arg0, 1);
11600 which = 1;
11603 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11604 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11605 & cst1) != cst1)
11606 which = -1;
11608 for (; which >= 0; which--)
11609 switch (TREE_CODE (pmop[which]))
11611 case BIT_AND_EXPR:
11612 case BIT_IOR_EXPR:
11613 case BIT_XOR_EXPR:
11614 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11615 != INTEGER_CST)
11616 break;
11617 /* tree_low_cst not used, because we don't care about
11618 the upper bits. */
11619 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11620 cst0 &= cst1;
11621 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11623 if (cst0 != cst1)
11624 break;
11626 else if (cst0 != 0)
11627 break;
11628 /* If C or D is of the form (A & N) where
11629 (N & M) == M, or of the form (A | N) or
11630 (A ^ N) where (N & M) == 0, replace it with A. */
11631 pmop[which] = TREE_OPERAND (pmop[which], 0);
11632 break;
11633 case INTEGER_CST:
11634 /* If C or D is a N where (N & M) == 0, it can be
11635 omitted (assumed 0). */
11636 if ((TREE_CODE (arg0) == PLUS_EXPR
11637 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11638 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11639 pmop[which] = NULL;
11640 break;
11641 default:
11642 break;
11645 /* Only build anything new if we optimized one or both arguments
11646 above. */
11647 if (pmop[0] != TREE_OPERAND (arg0, 0)
11648 || (TREE_CODE (arg0) != NEGATE_EXPR
11649 && pmop[1] != TREE_OPERAND (arg0, 1)))
11651 tree utype = TREE_TYPE (arg0);
11652 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11654 /* Perform the operations in a type that has defined
11655 overflow behavior. */
11656 utype = unsigned_type_for (TREE_TYPE (arg0));
11657 if (pmop[0] != NULL)
11658 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11659 if (pmop[1] != NULL)
11660 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11663 if (TREE_CODE (arg0) == NEGATE_EXPR)
11664 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11665 else if (TREE_CODE (arg0) == PLUS_EXPR)
11667 if (pmop[0] != NULL && pmop[1] != NULL)
11668 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11669 pmop[0], pmop[1]);
11670 else if (pmop[0] != NULL)
11671 tem = pmop[0];
11672 else if (pmop[1] != NULL)
11673 tem = pmop[1];
11674 else
11675 return build_int_cst (type, 0);
11677 else if (pmop[0] == NULL)
11678 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11679 else
11680 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11681 pmop[0], pmop[1]);
11682 /* TEM is now the new binary +, - or unary - replacement. */
11683 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11684 fold_convert_loc (loc, utype, arg1));
11685 return fold_convert_loc (loc, type, tem);
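/* Worked example: ((a & 0xff) + b) & 0xf. Here M == 0xf, N == 0xff
   and (N & M) == M, so the inner masking cannot affect the low four
   bits of the sum, and the result is (a + b) & 0xf. */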
11690 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11691 if (t1 != NULL_TREE)
11692 return t1;
11693 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11694 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11695 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11697 unsigned int prec
11698 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11700 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11701 && (~TREE_INT_CST_LOW (arg1)
11702 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11703 return
11704 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11707 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11709 This results in more efficient code for machines without a NOR
11710 instruction. Combine will canonicalize to the first form
11711 which will allow use of NOR instructions provided by the
11712 backend if they exist. */
11713 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11714 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11716 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11717 build2 (BIT_IOR_EXPR, type,
11718 fold_convert_loc (loc, type,
11719 TREE_OPERAND (arg0, 0)),
11720 fold_convert_loc (loc, type,
11721 TREE_OPERAND (arg1, 0))));
11724 /* If arg0 is derived from the address of an object or function, we may
11725 be able to fold this expression using the object or function's
11726 alignment. */
11727 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11729 unsigned HOST_WIDE_INT modulus, residue;
11730 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11732 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11733 integer_onep (arg1));
11735 /* This works because modulus is a power of 2. If this weren't the
11736 case, we'd have to replace it by its greatest power-of-2
11737 divisor: modulus & -modulus. */
11738 if (low < modulus)
11739 return build_int_cst (type, residue & low);
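/* E.g. if ARG0 is the address of a variable known to be 8-byte
   aligned, modulus == 8 and residue == 0, so ARG0 & 7 folds to
   the constant 0. */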
11742 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11743 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11744 if the new mask might be further optimized. */
11745 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11746 || TREE_CODE (arg0) == RSHIFT_EXPR)
11747 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11748 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11749 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11750 < TYPE_PRECISION (TREE_TYPE (arg0))
11751 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11752 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11754 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11755 unsigned HOST_WIDE_INT mask
11756 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11757 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11758 tree shift_type = TREE_TYPE (arg0);
11760 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11761 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11762 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11763 && TYPE_PRECISION (TREE_TYPE (arg0))
11764 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11766 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11767 tree arg00 = TREE_OPERAND (arg0, 0);
11768 /* See if more bits can be proven as zero because of
11769 zero extension. */
11770 if (TREE_CODE (arg00) == NOP_EXPR
11771 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11773 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11774 if (TYPE_PRECISION (inner_type)
11775 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11776 && TYPE_PRECISION (inner_type) < prec)
11778 prec = TYPE_PRECISION (inner_type);
11779 /* See if we can shorten the right shift. */
11780 if (shiftc < prec)
11781 shift_type = inner_type;
11784 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11785 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11786 zerobits <<= prec - shiftc;
11787 /* For an arithmetic shift, if the sign bit could be set, zerobits
11788 may actually contain sign bits, so no transformation is
11789 possible unless MASK masks them all away. In that
11790 case the shift needs to be converted into a logical shift. */
11791 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11792 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11794 if ((mask & zerobits) == 0)
11795 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11796 else
11797 zerobits = 0;
11801 /* ((X << 16) & 0xff00) is (X, 0). */
11802 if ((mask & zerobits) == mask)
11803 return omit_one_operand_loc (loc, type,
11804 build_int_cst (type, 0), arg0);
11806 newmask = mask | zerobits;
11807 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11809 unsigned int prec;
11811 /* Only do the transformation if NEWMASK is some integer
11812 mode's mask. */
11813 for (prec = BITS_PER_UNIT;
11814 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11815 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11816 break;
11817 if (prec < HOST_BITS_PER_WIDE_INT
11818 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11820 tree newmaskt;
11822 if (shift_type != TREE_TYPE (arg0))
11824 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11825 fold_convert_loc (loc, shift_type,
11826 TREE_OPERAND (arg0, 0)),
11827 TREE_OPERAND (arg0, 1));
11828 tem = fold_convert_loc (loc, type, tem);
11830 else
11831 tem = op0;
11832 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11833 if (!tree_int_cst_equal (newmaskt, arg1))
11834 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
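/* E.g. for 32-bit unsigned x, (x >> 28) & 0xff: only the low four
   bits of the shifted value can be nonzero, so NEWMASK becomes
   all-ones and the AND subsequently folds away, leaving x >> 28. */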
11839 goto associate;
11841 case RDIV_EXPR:
11842 /* Don't touch a floating-point divide by zero unless the mode
11843 of the constant can represent infinity. */
11844 if (TREE_CODE (arg1) == REAL_CST
11845 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11846 && real_zerop (arg1))
11847 return NULL_TREE;
11849 /* Optimize A / A to 1.0 if we don't care about
11850 NaNs or Infinities. Skip the transformation
11851 for non-real operands. */
11852 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11853 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11854 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11855 && operand_equal_p (arg0, arg1, 0))
11857 tree r = build_real (TREE_TYPE (arg0), dconst1);
11859 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11862 /* The complex version of the above A / A optimization. */
11863 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11864 && operand_equal_p (arg0, arg1, 0))
11866 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11867 if (! HONOR_NANS (TYPE_MODE (elem_type))
11868 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11870 tree r = build_real (elem_type, dconst1);
11871 /* omit_two_operands will call fold_convert for us. */
11872 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11876 /* (-A) / (-B) -> A / B */
11877 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11878 return fold_build2_loc (loc, RDIV_EXPR, type,
11879 TREE_OPERAND (arg0, 0),
11880 negate_expr (arg1));
11881 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11882 return fold_build2_loc (loc, RDIV_EXPR, type,
11883 negate_expr (arg0),
11884 TREE_OPERAND (arg1, 0));
11886 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11887 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11888 && real_onep (arg1))
11889 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11891 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11892 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11893 && real_minus_onep (arg1))
11894 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11895 negate_expr (arg0)));
11897 /* If ARG1 is a constant, we can convert this to a multiply by the
11898 reciprocal. This does not have the same rounding properties,
11899 so only do this if -freciprocal-math. We can actually
11900 always safely do it if ARG1 is a power of two, but it's hard to
11901 tell if it is or not in a portable manner. */
11902 if (optimize
11903 && (TREE_CODE (arg1) == REAL_CST
11904 || (TREE_CODE (arg1) == COMPLEX_CST
11905 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11906 || (TREE_CODE (arg1) == VECTOR_CST
11907 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11909 if (flag_reciprocal_math
11910 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11911 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11912 /* Find the reciprocal if optimizing and the result is exact.
11913 TODO: Complex reciprocal not implemented. */
11914 if (TREE_CODE (arg1) != COMPLEX_CST)
11916 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11918 if (inverse)
11919 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
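/* E.g. x / 4.0 becomes x * 0.25 whenever the inverse is exact,
   while x / 5.0 becomes x * 0.2 only under -freciprocal-math,
   since 0.2 is not exactly representable in binary. */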
11922 /* Convert A/B/C to A/(B*C). */
11923 if (flag_reciprocal_math
11924 && TREE_CODE (arg0) == RDIV_EXPR)
11925 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11926 fold_build2_loc (loc, MULT_EXPR, type,
11927 TREE_OPERAND (arg0, 1), arg1));
11929 /* Convert A/(B/C) to (A/B)*C. */
11930 if (flag_reciprocal_math
11931 && TREE_CODE (arg1) == RDIV_EXPR)
11932 return fold_build2_loc (loc, MULT_EXPR, type,
11933 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11934 TREE_OPERAND (arg1, 0)),
11935 TREE_OPERAND (arg1, 1));
11937 /* Convert C1/(X*C2) into (C1/C2)/X. */
11938 if (flag_reciprocal_math
11939 && TREE_CODE (arg1) == MULT_EXPR
11940 && TREE_CODE (arg0) == REAL_CST
11941 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11943 tree tem = const_binop (RDIV_EXPR, arg0,
11944 TREE_OPERAND (arg1, 1));
11945 if (tem)
11946 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11947 TREE_OPERAND (arg1, 0));
11950 if (flag_unsafe_math_optimizations)
11952 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11953 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11955 /* Optimize sin(x)/cos(x) as tan(x). */
11956 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11957 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11958 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11959 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11960 CALL_EXPR_ARG (arg1, 0), 0))
11962 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11964 if (tanfn != NULL_TREE)
11965 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11968 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11969 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11970 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11971 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11972 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11973 CALL_EXPR_ARG (arg1, 0), 0))
11975 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11977 if (tanfn != NULL_TREE)
11979 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11980 CALL_EXPR_ARG (arg0, 0));
11981 return fold_build2_loc (loc, RDIV_EXPR, type,
11982 build_real (type, dconst1), tmp);
11986 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11987 NaNs or Infinities. */
11988 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11989 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11990 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11992 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11993 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11995 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11996 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11997 && operand_equal_p (arg00, arg01, 0))
11999 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12001 if (cosfn != NULL_TREE)
12002 return build_call_expr_loc (loc, cosfn, 1, arg00);
12006 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12007 NaNs or Infinities. */
12008 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12009 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12010 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12012 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12013 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12015 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12016 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12017 && operand_equal_p (arg00, arg01, 0))
12019 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12021 if (cosfn != NULL_TREE)
12023 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12024 return fold_build2_loc (loc, RDIV_EXPR, type,
12025 build_real (type, dconst1),
12026 tmp);
12031 /* Optimize pow(x,c)/x as pow(x,c-1). */
12032 if (fcode0 == BUILT_IN_POW
12033 || fcode0 == BUILT_IN_POWF
12034 || fcode0 == BUILT_IN_POWL)
12036 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12037 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12038 if (TREE_CODE (arg01) == REAL_CST
12039 && !TREE_OVERFLOW (arg01)
12040 && operand_equal_p (arg1, arg00, 0))
12042 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12043 REAL_VALUE_TYPE c;
12044 tree arg;
12046 c = TREE_REAL_CST (arg01);
12047 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12048 arg = build_real (type, c);
12049 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12053 /* Optimize a/root(b/c) into a*root(c/b). */
12054 if (BUILTIN_ROOT_P (fcode1))
12056 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12058 if (TREE_CODE (rootarg) == RDIV_EXPR)
12060 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12061 tree b = TREE_OPERAND (rootarg, 0);
12062 tree c = TREE_OPERAND (rootarg, 1);
12064 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12066 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12067 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12071 /* Optimize x/expN(y) into x*expN(-y). */
12072 if (BUILTIN_EXPONENT_P (fcode1))
12074 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12075 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12076 arg1 = build_call_expr_loc (loc,
12077 expfn, 1,
12078 fold_convert_loc (loc, type, arg));
12079 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12082 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12083 if (fcode1 == BUILT_IN_POW
12084 || fcode1 == BUILT_IN_POWF
12085 || fcode1 == BUILT_IN_POWL)
12087 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12088 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12089 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12090 tree neg11 = fold_convert_loc (loc, type,
12091 negate_expr (arg11));
12092 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12093 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12096 return NULL_TREE;
12098 case TRUNC_DIV_EXPR:
12099 /* Optimize (X & (-A)) / A where A is a power of 2,
12100 to X >> log2(A). */
12101 if (TREE_CODE (arg0) == BIT_AND_EXPR
12102 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12103 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12105 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12106 arg1, TREE_OPERAND (arg0, 1));
12107 if (sum && integer_zerop (sum)) {
12108 unsigned long pow2;
12110 if (TREE_INT_CST_LOW (arg1))
12111 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12112 else
12113 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12114 + HOST_BITS_PER_WIDE_INT;
12116 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12117 TREE_OPERAND (arg0, 0),
12118 build_int_cst (integer_type_node, pow2));
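/* E.g. for signed x, (x & -8) / 8 becomes x >> 3: the masking makes
   the division exact, so it matches the arithmetic shift even for
   negative x. */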
12122 /* Fall through */
12124 case FLOOR_DIV_EXPR:
12125 /* Simplify A / (B << N) where A and B are positive and B is
12126 a power of 2, to A >> (N + log2(B)). */
12127 strict_overflow_p = false;
12128 if (TREE_CODE (arg1) == LSHIFT_EXPR
12129 && (TYPE_UNSIGNED (type)
12130 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12132 tree sval = TREE_OPERAND (arg1, 0);
12133 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12135 tree sh_cnt = TREE_OPERAND (arg1, 1);
12136 unsigned long pow2;
12138 if (TREE_INT_CST_LOW (sval))
12139 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12140 else
12141 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12142 + HOST_BITS_PER_WIDE_INT;
12144 if (strict_overflow_p)
12145 fold_overflow_warning (("assuming signed overflow does not "
12146 "occur when simplifying A / (B << N)"),
12147 WARN_STRICT_OVERFLOW_MISC);
12149 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12150 sh_cnt,
12151 build_int_cst (TREE_TYPE (sh_cnt),
12152 pow2));
12153 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12154 fold_convert_loc (loc, type, arg0), sh_cnt);
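/* E.g. for unsigned a, a / (4 << n) becomes a >> (n + 2). */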
12158 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12159 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12160 if (INTEGRAL_TYPE_P (type)
12161 && TYPE_UNSIGNED (type)
12162 && code == FLOOR_DIV_EXPR)
12163 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12165 /* Fall through */
12167 case ROUND_DIV_EXPR:
12168 case CEIL_DIV_EXPR:
12169 case EXACT_DIV_EXPR:
12170 if (integer_onep (arg1))
12171 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12172 if (integer_zerop (arg1))
12173 return NULL_TREE;
12174 /* X / -1 is -X. */
12175 if (!TYPE_UNSIGNED (type)
12176 && TREE_CODE (arg1) == INTEGER_CST
12177 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12178 && TREE_INT_CST_HIGH (arg1) == -1)
12179 return fold_convert_loc (loc, type, negate_expr (arg0));
12181 /* Convert -A / -B to A / B when the type is signed and overflow is
12182 undefined. */
12183 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12184 && TREE_CODE (arg0) == NEGATE_EXPR
12185 && negate_expr_p (arg1))
12187 if (INTEGRAL_TYPE_P (type))
12188 fold_overflow_warning (("assuming signed overflow does not occur "
12189 "when distributing negation across "
12190 "division"),
12191 WARN_STRICT_OVERFLOW_MISC);
12192 return fold_build2_loc (loc, code, type,
12193 fold_convert_loc (loc, type,
12194 TREE_OPERAND (arg0, 0)),
12195 fold_convert_loc (loc, type,
12196 negate_expr (arg1)));
12198 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12199 && TREE_CODE (arg1) == NEGATE_EXPR
12200 && negate_expr_p (arg0))
12202 if (INTEGRAL_TYPE_P (type))
12203 fold_overflow_warning (("assuming signed overflow does not occur "
12204 "when distributing negation across "
12205 "division"),
12206 WARN_STRICT_OVERFLOW_MISC);
12207 return fold_build2_loc (loc, code, type,
12208 fold_convert_loc (loc, type,
12209 negate_expr (arg0)),
12210 fold_convert_loc (loc, type,
12211 TREE_OPERAND (arg1, 0)));
12214 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12215 operation, EXACT_DIV_EXPR.
12217 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12218 At one time others generated faster code; it's not clear whether they
12219 still do after the last round of changes to the DIV code in expmed.c. */
12220 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12221 && multiple_of_p (type, arg0, arg1))
12222 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12224 strict_overflow_p = false;
12225 if (TREE_CODE (arg1) == INTEGER_CST
12226 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12227 &strict_overflow_p)))
12229 if (strict_overflow_p)
12230 fold_overflow_warning (("assuming signed overflow does not occur "
12231 "when simplifying division"),
12232 WARN_STRICT_OVERFLOW_MISC);
12233 return fold_convert_loc (loc, type, tem);
12236 return NULL_TREE;
12238 case CEIL_MOD_EXPR:
12239 case FLOOR_MOD_EXPR:
12240 case ROUND_MOD_EXPR:
12241 case TRUNC_MOD_EXPR:
12242 /* X % 1 is always zero, but be sure to preserve any side
12243 effects in X. */
12244 if (integer_onep (arg1))
12245 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12247 /* X % 0, return X % 0 unchanged so that we can get the
12248 proper warnings and errors. */
12249 if (integer_zerop (arg1))
12250 return NULL_TREE;
12252 /* 0 % X is always zero, but be sure to preserve any side
12253 effects in X. Place this after checking for X == 0. */
12254 if (integer_zerop (arg0))
12255 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12257 /* X % -1 is zero. */
12258 if (!TYPE_UNSIGNED (type)
12259 && TREE_CODE (arg1) == INTEGER_CST
12260 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12261 && TREE_INT_CST_HIGH (arg1) == -1)
12262 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12264 /* X % -C is the same as X % C. */
12265 if (code == TRUNC_MOD_EXPR
12266 && !TYPE_UNSIGNED (type)
12267 && TREE_CODE (arg1) == INTEGER_CST
12268 && !TREE_OVERFLOW (arg1)
12269 && TREE_INT_CST_HIGH (arg1) < 0
12270 && !TYPE_OVERFLOW_TRAPS (type)
12271 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12272 && !sign_bit_p (arg1, arg1))
12273 return fold_build2_loc (loc, code, type,
12274 fold_convert_loc (loc, type, arg0),
12275 fold_convert_loc (loc, type,
12276 negate_expr (arg1)));
12278 /* X % -Y is the same as X % Y. */
12279 if (code == TRUNC_MOD_EXPR
12280 && !TYPE_UNSIGNED (type)
12281 && TREE_CODE (arg1) == NEGATE_EXPR
12282 && !TYPE_OVERFLOW_TRAPS (type))
12283 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12284 fold_convert_loc (loc, type,
12285 TREE_OPERAND (arg1, 0)));
12287 strict_overflow_p = false;
12288 if (TREE_CODE (arg1) == INTEGER_CST
12289 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12290 &strict_overflow_p)))
12292 if (strict_overflow_p)
12293 fold_overflow_warning (("assuming signed overflow does not occur "
12294 "when simplifying modulus"),
12295 WARN_STRICT_OVERFLOW_MISC);
12296 return fold_convert_loc (loc, type, tem);
12299 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12300 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
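       /* For example, "x % 8" with unsigned x becomes "x & 7", and
	  "x % (4 << n)" becomes "x & ((4 << n) - 1)".  */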
12301 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12302 && (TYPE_UNSIGNED (type)
12303 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12305 tree c = arg1;
12306 /* Also optimize A % (C << N) where C is a power of 2,
12307 to A & ((C << N) - 1). */
12308 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12309 c = TREE_OPERAND (arg1, 0);
12311 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12313 tree mask
12314 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12315 build_int_cst (TREE_TYPE (arg1), 1));
12316 if (strict_overflow_p)
12317 fold_overflow_warning (("assuming signed overflow does not "
12318 "occur when simplifying "
12319 "X % (power of two)"),
12320 WARN_STRICT_OVERFLOW_MISC);
12321 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12322 fold_convert_loc (loc, type, arg0),
12323 fold_convert_loc (loc, type, mask));
12327 return NULL_TREE;
12329 case LROTATE_EXPR:
12330 case RROTATE_EXPR:
12331 if (integer_all_onesp (arg0))
12332 return omit_one_operand_loc (loc, type, arg0, arg1);
12333 goto shift;
12335 case RSHIFT_EXPR:
12336 /* Optimize -1 >> x for arithmetic right shifts. */
12337 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12338 && tree_expr_nonnegative_p (arg1))
12339 return omit_one_operand_loc (loc, type, arg0, arg1);
12340 /* ... fall through ... */
12342 case LSHIFT_EXPR:
12343 shift:
12344 if (integer_zerop (arg1))
12345 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12346 if (integer_zerop (arg0))
12347 return omit_one_operand_loc (loc, type, arg0, arg1);
12349 /* Since negative shift count is not well-defined,
12350 don't try to compute it in the compiler. */
12351 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12352 return NULL_TREE;
12354 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
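       /* For example, "(x << 3) << 5" becomes "x << 8".  If the sum
	  reaches the type's precision, the cases below apply: rotates
	  reduce the sum modulo the precision, unsigned and left shifts
	  become zero, and signed right shifts saturate at precision - 1.  */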
12355 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12356 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12357 && host_integerp (TREE_OPERAND (arg0, 1), false)
12358 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12360 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12361 + TREE_INT_CST_LOW (arg1));
12363 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12364 being well defined. */
12365 if (low >= TYPE_PRECISION (type))
12367 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12368 low = low % TYPE_PRECISION (type);
12369 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12370 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12371 TREE_OPERAND (arg0, 0));
12372 else
12373 low = TYPE_PRECISION (type) - 1;
12376 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12377 build_int_cst (type, low));
12380 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12381 into x & ((unsigned)-1 >> c) for unsigned types. */
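       /* For example, with 32-bit unsigned x, "(x >> 4) << 4" becomes
	  "x & 0xfffffff0" and "(x << 4) >> 4" becomes "x & 0x0fffffff".  */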
12382 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12383 || (TYPE_UNSIGNED (type)
12384 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12385 && host_integerp (arg1, false)
12386 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12387 && host_integerp (TREE_OPERAND (arg0, 1), false)
12388 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12390 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12391 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12392 tree lshift;
12393 tree arg00;
12395 if (low0 == low1)
12397 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12399 lshift = build_int_cst (type, -1);
12400 lshift = int_const_binop (code, lshift, arg1);
12402 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12406 /* Rewrite an LROTATE_EXPR by a constant into an
12407 RROTATE_EXPR by a new constant. */
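       /* For example, a 32-bit left-rotate by 3 becomes a right-rotate
	  by 29, so constant rotates are canonicalized to one direction.  */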
12408 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12410 tree tem = build_int_cst (TREE_TYPE (arg1),
12411 TYPE_PRECISION (type));
12412 tem = const_binop (MINUS_EXPR, tem, arg1);
12413 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12416 /* If we have a rotate of a bit operation with the rotate count and
12417 the second operand of the bit operation both constant,
12418 permute the two operations. */
12419 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12420 && (TREE_CODE (arg0) == BIT_AND_EXPR
12421 || TREE_CODE (arg0) == BIT_IOR_EXPR
12422 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12423 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12424 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12425 fold_build2_loc (loc, code, type,
12426 TREE_OPERAND (arg0, 0), arg1),
12427 fold_build2_loc (loc, code, type,
12428 TREE_OPERAND (arg0, 1), arg1));
12430 /* Two consecutive rotates adding up to the precision of the
12431 type can be ignored. */
12432 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12433 && TREE_CODE (arg0) == RROTATE_EXPR
12434 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12435 && TREE_INT_CST_HIGH (arg1) == 0
12436 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12437 && ((TREE_INT_CST_LOW (arg1)
12438 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12439 == (unsigned int) TYPE_PRECISION (type)))
12440 return TREE_OPERAND (arg0, 0);
12442 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12443 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12444 if the latter can be further optimized. */
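       /* For example, "(x & 0xff) << 8" becomes "(x << 8) & 0xff00",
	  but the rewrite is kept only when the new BIT_AND_EXPR folds.  */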
12445 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12446 && TREE_CODE (arg0) == BIT_AND_EXPR
12447 && TREE_CODE (arg1) == INTEGER_CST
12448 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12450 tree mask = fold_build2_loc (loc, code, type,
12451 fold_convert_loc (loc, type,
12452 TREE_OPERAND (arg0, 1)),
12453 arg1);
12454 tree shift = fold_build2_loc (loc, code, type,
12455 fold_convert_loc (loc, type,
12456 TREE_OPERAND (arg0, 0)),
12457 arg1);
12458 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12459 if (tem)
12460 return tem;
12463 return NULL_TREE;
12465 case MIN_EXPR:
12466 if (operand_equal_p (arg0, arg1, 0))
12467 return omit_one_operand_loc (loc, type, arg0, arg1);
12468 if (INTEGRAL_TYPE_P (type)
12469 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12470 return omit_one_operand_loc (loc, type, arg1, arg0);
12471 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12472 if (tem)
12473 return tem;
12474 goto associate;
12476 case MAX_EXPR:
12477 if (operand_equal_p (arg0, arg1, 0))
12478 return omit_one_operand_loc (loc, type, arg0, arg1);
12479 if (INTEGRAL_TYPE_P (type)
12480 && TYPE_MAX_VALUE (type)
12481 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12482 return omit_one_operand_loc (loc, type, arg1, arg0);
12483 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12484 if (tem)
12485 return tem;
12486 goto associate;
12488 case TRUTH_ANDIF_EXPR:
12489 /* Note that the operands of this must be ints
12490 and their values must be 0 or 1.
12491 ("true" is a fixed value perhaps depending on the language.) */
12492 /* If first arg is constant zero, return it. */
12493 if (integer_zerop (arg0))
12494 return fold_convert_loc (loc, type, arg0);
12495 case TRUTH_AND_EXPR:
12496 /* If either arg is constant true, drop it. */
12497 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12498 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12499 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12500 /* Preserve sequence points. */
12501 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12502 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12503 /* If second arg is constant zero, result is zero, but first arg
12504 must be evaluated. */
12505 if (integer_zerop (arg1))
12506 return omit_one_operand_loc (loc, type, arg1, arg0);
12507 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12508 case will be handled here. */
12509 if (integer_zerop (arg0))
12510 return omit_one_operand_loc (loc, type, arg0, arg1);
12512 /* !X && X is always false. */
12513 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12514 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12515 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12516 /* X && !X is always false. */
12517 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12518 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12519 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12521 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12522 means A >= Y && A != MAX, but in this case we know that
12523 A < X <= MAX. */
12525 if (!TREE_SIDE_EFFECTS (arg0)
12526 && !TREE_SIDE_EFFECTS (arg1))
12528 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12529 if (tem && !operand_equal_p (tem, arg0, 0))
12530 return fold_build2_loc (loc, code, type, tem, arg1);
12532 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12533 if (tem && !operand_equal_p (tem, arg1, 0))
12534 return fold_build2_loc (loc, code, type, arg0, tem);
12537 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12538 != NULL_TREE)
12539 return tem;
12541 return NULL_TREE;
12543 case TRUTH_ORIF_EXPR:
12544 /* Note that the operands of this must be ints
12545 and their values must be 0 or true.
12546 ("true" is a fixed value perhaps depending on the language.) */
12547 /* If first arg is constant true, return it. */
12548 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12549 return fold_convert_loc (loc, type, arg0);
12550 case TRUTH_OR_EXPR:
12551 /* If either arg is constant zero, drop it. */
12552 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12553 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12554 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12555 /* Preserve sequence points. */
12556 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12557 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12558 /* If second arg is constant true, result is true, but we must
12559 evaluate first arg. */
12560 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12561 return omit_one_operand_loc (loc, type, arg1, arg0);
12562 /* Likewise for first arg, but note this only occurs here for
12563 TRUTH_OR_EXPR. */
12564 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12565 return omit_one_operand_loc (loc, type, arg0, arg1);
12567 /* !X || X is always true. */
12568 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12569 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12570 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12571 /* X || !X is always true. */
12572 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12573 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12574 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12576 /* (X && !Y) || (!X && Y) is X ^ Y */
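       /* For example, "(a && !b) || (!a && b)" becomes "a ^ b"; the
	  negated operands may be matched in either order.  */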
12577 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12578 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12580 tree a0, a1, l0, l1, n0, n1;
12582 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12583 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12585 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12586 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12588 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12589 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12591 if ((operand_equal_p (n0, a0, 0)
12592 && operand_equal_p (n1, a1, 0))
12593 || (operand_equal_p (n0, a1, 0)
12594 && operand_equal_p (n1, a0, 0)))
12595 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12598 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12599 != NULL_TREE)
12600 return tem;
12602 return NULL_TREE;
12604 case TRUTH_XOR_EXPR:
12605 /* If the second arg is constant zero, drop it. */
12606 if (integer_zerop (arg1))
12607 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12608 /* If the second arg is constant true, this is a logical inversion. */
12609 if (integer_onep (arg1))
12611 /* Only call invert_truthvalue if operand is a truth value. */
12612 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12613 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12614 else
12615 tem = invert_truthvalue_loc (loc, arg0);
12616 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12618 /* Identical arguments cancel to zero. */
12619 if (operand_equal_p (arg0, arg1, 0))
12620 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12622 /* !X ^ X is always true. */
12623 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12624 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12625 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12627 /* X ^ !X is always true. */
12628 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12629 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12630 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12632 return NULL_TREE;
12634 case EQ_EXPR:
12635 case NE_EXPR:
12636 STRIP_NOPS (arg0);
12637 STRIP_NOPS (arg1);
12639 tem = fold_comparison (loc, code, type, op0, op1);
12640 if (tem != NULL_TREE)
12641 return tem;
12643 /* bool_var != 0 becomes bool_var. */
12644 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12645 && code == NE_EXPR)
12646 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12648 /* bool_var == 1 becomes bool_var. */
12649 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12650 && code == EQ_EXPR)
12651 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12653 /* bool_var != 1 becomes !bool_var. */
12654 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12655 && code == NE_EXPR)
12656 return fold_convert_loc (loc, type,
12657 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12658 TREE_TYPE (arg0), arg0));
12660 /* bool_var == 0 becomes !bool_var. */
12661 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12662 && code == EQ_EXPR)
12663 return fold_convert_loc (loc, type,
12664 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12665 TREE_TYPE (arg0), arg0));
12667 /* !exp != 0 becomes !exp */
12668 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12669 && code == NE_EXPR)
12670 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12672 /* If this is an equality comparison of the address of two non-weak,
12673 unaliased symbols neither of which are extern (since we do not
12674 have access to attributes for externs), then we know the result. */
12675 if (TREE_CODE (arg0) == ADDR_EXPR
12676 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12677 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12678 && ! lookup_attribute ("alias",
12679 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12680 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12681 && TREE_CODE (arg1) == ADDR_EXPR
12682 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12683 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12684 && ! lookup_attribute ("alias",
12685 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12686 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12688 /* We know that we're looking at the address of two
12689 non-weak, unaliased, static _DECL nodes.
12691 It is both wasteful and incorrect to call operand_equal_p
12692 to compare the two ADDR_EXPR nodes. It is wasteful in that
12693 all we need to do is test pointer equality for the arguments
12694 to the two ADDR_EXPR nodes. It is incorrect to use
12695 operand_equal_p as that function is NOT equivalent to a
12696 C equality test. It can in fact return false for two
12697 objects which would test as equal using the C equality
12698 operator. */
12699 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12700 return constant_boolean_node (equal
12701 ? code == EQ_EXPR : code != EQ_EXPR,
12702 type);
12705 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12706 a MINUS_EXPR of a constant, we can convert it into a comparison with
12707 a revised constant as long as no overflow occurs. */
12708 if (TREE_CODE (arg1) == INTEGER_CST
12709 && (TREE_CODE (arg0) == PLUS_EXPR
12710 || TREE_CODE (arg0) == MINUS_EXPR)
12711 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12712 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12713 ? MINUS_EXPR : PLUS_EXPR,
12714 fold_convert_loc (loc, TREE_TYPE (arg0),
12715 arg1),
12716 TREE_OPERAND (arg0, 1)))
12717 && !TREE_OVERFLOW (tem))
12718 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12720 /* Similarly for a NEGATE_EXPR. */
12721 if (TREE_CODE (arg0) == NEGATE_EXPR
12722 && TREE_CODE (arg1) == INTEGER_CST
12723 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12724 arg1)))
12725 && TREE_CODE (tem) == INTEGER_CST
12726 && !TREE_OVERFLOW (tem))
12727 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12729 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
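       /* For example, "(x ^ 5) == 3" becomes "x == 6", since xor-ing
	  both sides with 5 cancels the constant on the left.  */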
12730 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12731 && TREE_CODE (arg1) == INTEGER_CST
12732 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12733 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12734 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12735 fold_convert_loc (loc,
12736 TREE_TYPE (arg0),
12737 arg1),
12738 TREE_OPERAND (arg0, 1)));
12740 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12741 if ((TREE_CODE (arg0) == PLUS_EXPR
12742 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12743 || TREE_CODE (arg0) == MINUS_EXPR)
12744 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12745 0)),
12746 arg1, 0)
12747 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12748 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12750 tree val = TREE_OPERAND (arg0, 1);
12751 return omit_two_operands_loc (loc, type,
12752 fold_build2_loc (loc, code, type,
12753 val,
12754 build_int_cst (TREE_TYPE (val),
12755 0)),
12756 TREE_OPERAND (arg0, 0), arg1);
12759 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
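       /* "C - x == x" would need "2 * x == C", which is unsolvable in
	  modulo-2^N arithmetic for odd C, so the result is constant.  */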
12760 if (TREE_CODE (arg0) == MINUS_EXPR
12761 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12762 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12763 1)),
12764 arg1, 0)
12765 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12767 return omit_two_operands_loc (loc, type,
12768 code == NE_EXPR
12769 ? boolean_true_node : boolean_false_node,
12770 TREE_OPERAND (arg0, 1), arg1);
12773 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12774 for !=. Don't do this for ordered comparisons due to overflow. */
12775 if (TREE_CODE (arg0) == MINUS_EXPR
12776 && integer_zerop (arg1))
12777 return fold_build2_loc (loc, code, type,
12778 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12780 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12781 if (TREE_CODE (arg0) == ABS_EXPR
12782 && (integer_zerop (arg1) || real_zerop (arg1)))
12783 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12785 /* If this is an EQ or NE comparison with zero and ARG0 is
12786 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12787 two operations, but the latter can be done in one less insn
12788 on machines that have only two-operand insns or on which a
12789 constant cannot be the first operand. */
12790 if (TREE_CODE (arg0) == BIT_AND_EXPR
12791 && integer_zerop (arg1))
12793 tree arg00 = TREE_OPERAND (arg0, 0);
12794 tree arg01 = TREE_OPERAND (arg0, 1);
12795 if (TREE_CODE (arg00) == LSHIFT_EXPR
12796 && integer_onep (TREE_OPERAND (arg00, 0)))
12798 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12799 arg01, TREE_OPERAND (arg00, 1));
12800 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12801 build_int_cst (TREE_TYPE (arg0), 1));
12802 return fold_build2_loc (loc, code, type,
12803 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12804 arg1);
12806 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12807 && integer_onep (TREE_OPERAND (arg01, 0)))
12809 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12810 arg00, TREE_OPERAND (arg01, 1));
12811 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12812 build_int_cst (TREE_TYPE (arg0), 1));
12813 return fold_build2_loc (loc, code, type,
12814 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12815 arg1);
12819 /* If this is an NE or EQ comparison of zero against the result of a
12820 signed MOD operation whose second operand is a power of 2, make
12821 the MOD operation unsigned since it is simpler and equivalent. */
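       /* For example, with signed x, "x % 16 == 0" becomes
	  "(unsigned) x % 16 == 0"; divisibility by a power of two
	  depends only on the low bits, and the unsigned MOD can later
	  be folded to a simple bit mask.  */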
12822 if (integer_zerop (arg1)
12823 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12824 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12825 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12826 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12827 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12828 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12830 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12831 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12832 fold_convert_loc (loc, newtype,
12833 TREE_OPERAND (arg0, 0)),
12834 fold_convert_loc (loc, newtype,
12835 TREE_OPERAND (arg0, 1)));
12837 return fold_build2_loc (loc, code, type, newmod,
12838 fold_convert_loc (loc, newtype, arg1));
12841 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12842 C1 is a valid shift constant, and C2 is a power of two, i.e.
12843 a single bit. */
12844 if (TREE_CODE (arg0) == BIT_AND_EXPR
12845 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12846 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12847 == INTEGER_CST
12848 && integer_pow2p (TREE_OPERAND (arg0, 1))
12849 && integer_zerop (arg1))
12851 tree itype = TREE_TYPE (arg0);
12852 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12853 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12855 /* Check for a valid shift count. */
12856 if (TREE_INT_CST_HIGH (arg001) == 0
12857 && TREE_INT_CST_LOW (arg001) < prec)
12859 tree arg01 = TREE_OPERAND (arg0, 1);
12860 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12861 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12862 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12863 can be rewritten as (X & (C2 << C1)) != 0. */
12864 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12866 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12867 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12868 return fold_build2_loc (loc, code, type, tem,
12869 fold_convert_loc (loc, itype, arg1));
12871 /* Otherwise, for signed (arithmetic) shifts,
12872 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12873 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12874 else if (!TYPE_UNSIGNED (itype))
12875 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12876 arg000, build_int_cst (itype, 0));
12877 /* Otherwise, for unsigned (logical) shifts,
12878 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12879 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12880 else
12881 return omit_one_operand_loc (loc, type,
12882 code == EQ_EXPR ? integer_one_node
12883 : integer_zero_node,
12884 arg000);
12888 /* If we have (A & C) == C where C is a power of 2, convert this into
12889 (A & C) != 0. Similarly for NE_EXPR. */
12890 if (TREE_CODE (arg0) == BIT_AND_EXPR
12891 && integer_pow2p (TREE_OPERAND (arg0, 1))
12892 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12893 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12894 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12895 integer_zero_node));
12897 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12898 bit, then fold the expression into A < 0 or A >= 0. */
12899 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12900 if (tem)
12901 return tem;
12903 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12904 Similarly for NE_EXPR. */
12905 if (TREE_CODE (arg0) == BIT_AND_EXPR
12906 && TREE_CODE (arg1) == INTEGER_CST
12907 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12909 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12910 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12911 TREE_OPERAND (arg0, 1));
12912 tree dandnotc
12913 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12914 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12915 notc);
12916 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12917 if (integer_nonzerop (dandnotc))
12918 return omit_one_operand_loc (loc, type, rslt, arg0);
12921 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12922 Similarly for NE_EXPR. */
12923 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12924 && TREE_CODE (arg1) == INTEGER_CST
12925 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12927 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12928 tree candnotd
12929 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12930 TREE_OPERAND (arg0, 1),
12931 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12932 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12933 if (integer_nonzerop (candnotd))
12934 return omit_one_operand_loc (loc, type, rslt, arg0);
12937 /* If this is a comparison of a field, we may be able to simplify it. */
12938 if ((TREE_CODE (arg0) == COMPONENT_REF
12939 || TREE_CODE (arg0) == BIT_FIELD_REF)
12940 /* Handle the constant case even without -O
12941 to make sure the warnings are given. */
12942 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12944 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12945 if (t1)
12946 return t1;
12949 /* Optimize comparisons of strlen vs zero to a compare of the
12950 first character of the string vs zero. To wit,
12951 strlen(ptr) == 0 => *ptr == 0
12952 strlen(ptr) != 0 => *ptr != 0
12953 Other cases should reduce to one of these two (or a constant)
12954 due to the return value of strlen being unsigned. */
12955 if (TREE_CODE (arg0) == CALL_EXPR
12956 && integer_zerop (arg1))
12958 tree fndecl = get_callee_fndecl (arg0);
12960 if (fndecl
12961 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12962 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12963 && call_expr_nargs (arg0) == 1
12964 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12966 tree iref = build_fold_indirect_ref_loc (loc,
12967 CALL_EXPR_ARG (arg0, 0));
12968 return fold_build2_loc (loc, code, type, iref,
12969 build_int_cst (TREE_TYPE (iref), 0));
12973 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12974 of X. Similarly fold (X >> C) == 0 into X >= 0. */
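       /* For example, with 32-bit x, "(x >> 31) != 0" becomes "x < 0"
	  and "(x >> 31) == 0" becomes "x >= 0", converting unsigned x
	  to the corresponding signed type first.  */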
12975 if (TREE_CODE (arg0) == RSHIFT_EXPR
12976 && integer_zerop (arg1)
12977 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12979 tree arg00 = TREE_OPERAND (arg0, 0);
12980 tree arg01 = TREE_OPERAND (arg0, 1);
12981 tree itype = TREE_TYPE (arg00);
12982 if (TREE_INT_CST_HIGH (arg01) == 0
12983 && TREE_INT_CST_LOW (arg01)
12984 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12986 if (TYPE_UNSIGNED (itype))
12988 itype = signed_type_for (itype);
12989 arg00 = fold_convert_loc (loc, itype, arg00);
12991 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12992 type, arg00, build_zero_cst (itype));
12996 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12997 if (integer_zerop (arg1)
12998 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12999 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13000 TREE_OPERAND (arg0, 1));
13002 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13003 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13004 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13005 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13006 build_zero_cst (TREE_TYPE (arg0)));
13007 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13008 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13009 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13010 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13011 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13012 build_zero_cst (TREE_TYPE (arg0)));
13014 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13015 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13016 && TREE_CODE (arg1) == INTEGER_CST
13017 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13018 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13019 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13020 TREE_OPERAND (arg0, 1), arg1));
13022 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13023 (X & C) == 0 when C is a single bit. */
13024 if (TREE_CODE (arg0) == BIT_AND_EXPR
13025 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13026 && integer_zerop (arg1)
13027 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13029 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13030 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13031 TREE_OPERAND (arg0, 1));
13032 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13033 type, tem,
13034 fold_convert_loc (loc, TREE_TYPE (arg0),
13035 arg1));
13038 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13039 constant C is a power of two, i.e. a single bit. */
13040 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13041 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13042 && integer_zerop (arg1)
13043 && integer_pow2p (TREE_OPERAND (arg0, 1))
13044 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13045 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13047 tree arg00 = TREE_OPERAND (arg0, 0);
13048 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13049 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13052 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13053 when C is a power of two, i.e. a single bit. */
13054 if (TREE_CODE (arg0) == BIT_AND_EXPR
13055 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13056 && integer_zerop (arg1)
13057 && integer_pow2p (TREE_OPERAND (arg0, 1))
13058 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13059 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13061 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13062 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13063 arg000, TREE_OPERAND (arg0, 1));
13064 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13065 tem, build_int_cst (TREE_TYPE (tem), 0));
13068 if (integer_zerop (arg1)
13069 && tree_expr_nonzero_p (arg0))
13071 tree res = constant_boolean_node (code == NE_EXPR, type);
13072 return omit_one_operand_loc (loc, type, res, arg0);
13075 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13076 if (TREE_CODE (arg0) == NEGATE_EXPR
13077 && TREE_CODE (arg1) == NEGATE_EXPR)
13078 return fold_build2_loc (loc, code, type,
13079 TREE_OPERAND (arg0, 0),
13080 fold_convert_loc (loc, TREE_TYPE (arg0),
13081 TREE_OPERAND (arg1, 0)));
13083 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13084 if (TREE_CODE (arg0) == BIT_AND_EXPR
13085 && TREE_CODE (arg1) == BIT_AND_EXPR)
13087 tree arg00 = TREE_OPERAND (arg0, 0);
13088 tree arg01 = TREE_OPERAND (arg0, 1);
13089 tree arg10 = TREE_OPERAND (arg1, 0);
13090 tree arg11 = TREE_OPERAND (arg1, 1);
13091 tree itype = TREE_TYPE (arg0);
13093 if (operand_equal_p (arg01, arg11, 0))
13094 return fold_build2_loc (loc, code, type,
13095 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13096 fold_build2_loc (loc,
13097 BIT_XOR_EXPR, itype,
13098 arg00, arg10),
13099 arg01),
13100 build_zero_cst (itype));
13102 if (operand_equal_p (arg01, arg10, 0))
13103 return fold_build2_loc (loc, code, type,
13104 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13105 fold_build2_loc (loc,
13106 BIT_XOR_EXPR, itype,
13107 arg00, arg11),
13108 arg01),
13109 build_zero_cst (itype));
13111 if (operand_equal_p (arg00, arg11, 0))
13112 return fold_build2_loc (loc, code, type,
13113 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13114 fold_build2_loc (loc,
13115 BIT_XOR_EXPR, itype,
13116 arg01, arg10),
13117 arg00),
13118 build_zero_cst (itype));
13120 if (operand_equal_p (arg00, arg10, 0))
13121 return fold_build2_loc (loc, code, type,
13122 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13123 fold_build2_loc (loc,
13124 BIT_XOR_EXPR, itype,
13125 arg01, arg11),
13126 arg00),
13127 build_zero_cst (itype));
13130 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13131 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13133 tree arg00 = TREE_OPERAND (arg0, 0);
13134 tree arg01 = TREE_OPERAND (arg0, 1);
13135 tree arg10 = TREE_OPERAND (arg1, 0);
13136 tree arg11 = TREE_OPERAND (arg1, 1);
13137 tree itype = TREE_TYPE (arg0);
13139 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13140 operand_equal_p guarantees no side-effects so we don't need
13141 to use omit_one_operand on Z. */
13142 if (operand_equal_p (arg01, arg11, 0))
13143 return fold_build2_loc (loc, code, type, arg00,
13144 fold_convert_loc (loc, TREE_TYPE (arg00),
13145 arg10));
13146 if (operand_equal_p (arg01, arg10, 0))
13147 return fold_build2_loc (loc, code, type, arg00,
13148 fold_convert_loc (loc, TREE_TYPE (arg00),
13149 arg11));
13150 if (operand_equal_p (arg00, arg11, 0))
13151 return fold_build2_loc (loc, code, type, arg01,
13152 fold_convert_loc (loc, TREE_TYPE (arg01),
13153 arg10));
13154 if (operand_equal_p (arg00, arg10, 0))
13155 return fold_build2_loc (loc, code, type, arg01,
13156 fold_convert_loc (loc, TREE_TYPE (arg01),
13157 arg11));
13159 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13160 if (TREE_CODE (arg01) == INTEGER_CST
13161 && TREE_CODE (arg11) == INTEGER_CST)
13163 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13164 fold_convert_loc (loc, itype, arg11));
13165 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13166 return fold_build2_loc (loc, code, type, tem,
13167 fold_convert_loc (loc, itype, arg10));
13171 /* Attempt to simplify equality/inequality comparisons of complex
13172 values. Only lower the comparison if the result is known or
13173 can be simplified to a single scalar comparison. */
13174 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13175 || TREE_CODE (arg0) == COMPLEX_CST)
13176 && (TREE_CODE (arg1) == COMPLEX_EXPR
13177 || TREE_CODE (arg1) == COMPLEX_CST))
13179 tree real0, imag0, real1, imag1;
13180 tree rcond, icond;
13182 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13184 real0 = TREE_OPERAND (arg0, 0);
13185 imag0 = TREE_OPERAND (arg0, 1);
13187 else
13189 real0 = TREE_REALPART (arg0);
13190 imag0 = TREE_IMAGPART (arg0);
13193 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13195 real1 = TREE_OPERAND (arg1, 0);
13196 imag1 = TREE_OPERAND (arg1, 1);
13198 else
13200 real1 = TREE_REALPART (arg1);
13201 imag1 = TREE_IMAGPART (arg1);
13204 rcond = fold_binary_loc (loc, code, type, real0, real1);
13205 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13207 if (integer_zerop (rcond))
13209 if (code == EQ_EXPR)
13210 return omit_two_operands_loc (loc, type, boolean_false_node,
13211 imag0, imag1);
13212 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13214 else
13216 if (code == NE_EXPR)
13217 return omit_two_operands_loc (loc, type, boolean_true_node,
13218 imag0, imag1);
13219 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13223 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13224 if (icond && TREE_CODE (icond) == INTEGER_CST)
13226 if (integer_zerop (icond))
13228 if (code == EQ_EXPR)
13229 return omit_two_operands_loc (loc, type, boolean_false_node,
13230 real0, real1);
13231 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13233 else
13235 if (code == NE_EXPR)
13236 return omit_two_operands_loc (loc, type, boolean_true_node,
13237 real0, real1);
13238 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13243 return NULL_TREE;
13245 case LT_EXPR:
13246 case GT_EXPR:
13247 case LE_EXPR:
13248 case GE_EXPR:
13249 tem = fold_comparison (loc, code, type, op0, op1);
13250 if (tem != NULL_TREE)
13251 return tem;
13253 /* Transform comparisons of the form X +- C CMP X. */
13254 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13255 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13256 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13257 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13258 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13259 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13261 tree arg01 = TREE_OPERAND (arg0, 1);
13262 enum tree_code code0 = TREE_CODE (arg0);
13263 int is_positive;
13265 if (TREE_CODE (arg01) == REAL_CST)
13266 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13267 else
13268 is_positive = tree_int_cst_sgn (arg01);
13270 /* (X - c) > X becomes false. */
13271 if (code == GT_EXPR
13272 && ((code0 == MINUS_EXPR && is_positive >= 0)
13273 || (code0 == PLUS_EXPR && is_positive <= 0)))
13275 if (TREE_CODE (arg01) == INTEGER_CST
13276 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13277 fold_overflow_warning (("assuming signed overflow does not "
13278 "occur when assuming that (X - c) > X "
13279 "is always false"),
13280 WARN_STRICT_OVERFLOW_ALL);
13281 return constant_boolean_node (0, type);
13284 /* Likewise (X + c) < X becomes false. */
13285 if (code == LT_EXPR
13286 && ((code0 == PLUS_EXPR && is_positive >= 0)
13287 || (code0 == MINUS_EXPR && is_positive <= 0)))
13289 if (TREE_CODE (arg01) == INTEGER_CST
13290 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13291 fold_overflow_warning (("assuming signed overflow does not "
13292 "occur when assuming that "
13293 "(X + c) < X is always false"),
13294 WARN_STRICT_OVERFLOW_ALL);
13295 return constant_boolean_node (0, type);
13298 /* Convert (X - c) <= X to true. */
13299 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13300 && code == LE_EXPR
13301 && ((code0 == MINUS_EXPR && is_positive >= 0)
13302 || (code0 == PLUS_EXPR && is_positive <= 0)))
13304 if (TREE_CODE (arg01) == INTEGER_CST
13305 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13306 fold_overflow_warning (("assuming signed overflow does not "
13307 "occur when assuming that "
13308 "(X - c) <= X is always true"),
13309 WARN_STRICT_OVERFLOW_ALL);
13310 return constant_boolean_node (1, type);
13313 /* Convert (X + c) >= X to true. */
13314 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13315 && code == GE_EXPR
13316 && ((code0 == PLUS_EXPR && is_positive >= 0)
13317 || (code0 == MINUS_EXPR && is_positive <= 0)))
13319 if (TREE_CODE (arg01) == INTEGER_CST
13320 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13321 fold_overflow_warning (("assuming signed overflow does not "
13322 "occur when assuming that "
13323 "(X + c) >= X is always true"),
13324 WARN_STRICT_OVERFLOW_ALL);
13325 return constant_boolean_node (1, type);
13328 if (TREE_CODE (arg01) == INTEGER_CST)
13330 /* Convert X + c > X and X - c < X to true for integers. */
13331 if (code == GT_EXPR
13332 && ((code0 == PLUS_EXPR && is_positive > 0)
13333 || (code0 == MINUS_EXPR && is_positive < 0)))
13335 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13336 fold_overflow_warning (("assuming signed overflow does "
13337 "not occur when assuming that "
13338 "(X + c) > X is always true"),
13339 WARN_STRICT_OVERFLOW_ALL);
13340 return constant_boolean_node (1, type);
13343 if (code == LT_EXPR
13344 && ((code0 == MINUS_EXPR && is_positive > 0)
13345 || (code0 == PLUS_EXPR && is_positive < 0)))
13347 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13348 fold_overflow_warning (("assuming signed overflow does "
13349 "not occur when assuming that "
13350 "(X - c) < X is always true"),
13351 WARN_STRICT_OVERFLOW_ALL);
13352 return constant_boolean_node (1, type);
13355 /* Convert X + c <= X and X - c >= X to false for integers. */
13356 if (code == LE_EXPR
13357 && ((code0 == PLUS_EXPR && is_positive > 0)
13358 || (code0 == MINUS_EXPR && is_positive < 0)))
13360 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13361 fold_overflow_warning (("assuming signed overflow does "
13362 "not occur when assuming that "
13363 "(X + c) <= X is always false"),
13364 WARN_STRICT_OVERFLOW_ALL);
13365 return constant_boolean_node (0, type);
13368 if (code == GE_EXPR
13369 && ((code0 == MINUS_EXPR && is_positive > 0)
13370 || (code0 == PLUS_EXPR && is_positive < 0)))
13372 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13373 fold_overflow_warning (("assuming signed overflow does "
13374 "not occur when assuming that "
13375 "(X - c) >= X is always false"),
13376 WARN_STRICT_OVERFLOW_ALL);
13377 return constant_boolean_node (0, type);
13382 /* Comparisons with the highest or lowest possible integer of
13383 the specified precision will have known values. */
13385 tree arg1_type = TREE_TYPE (arg1);
13386 unsigned int width = TYPE_PRECISION (arg1_type);
13388 if (TREE_CODE (arg1) == INTEGER_CST
13389 && width <= HOST_BITS_PER_DOUBLE_INT
13390 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13392 HOST_WIDE_INT signed_max_hi;
13393 unsigned HOST_WIDE_INT signed_max_lo;
13394 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13396 if (width <= HOST_BITS_PER_WIDE_INT)
13398 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13399 - 1;
13400 signed_max_hi = 0;
13401 max_hi = 0;
13403 if (TYPE_UNSIGNED (arg1_type))
13405 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13406 min_lo = 0;
13407 min_hi = 0;
13409 else
13411 max_lo = signed_max_lo;
13412 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13413 min_hi = -1;
13416 else
13418 width -= HOST_BITS_PER_WIDE_INT;
13419 signed_max_lo = -1;
13420 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13421 - 1;
13422 max_lo = -1;
13423 min_lo = 0;
13425 if (TYPE_UNSIGNED (arg1_type))
13427 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13428 min_hi = 0;
13430 else
13432 max_hi = signed_max_hi;
13433 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13437 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13438 && TREE_INT_CST_LOW (arg1) == max_lo)
13439 switch (code)
13441 case GT_EXPR:
13442 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13444 case GE_EXPR:
13445 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13447 case LE_EXPR:
13448 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13450 case LT_EXPR:
13451 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13453 /* The GE_EXPR and LT_EXPR cases above are not normally
13454 reached because of previous transformations. */
13456 default:
13457 break;
13459 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13460 == max_hi
13461 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13462 switch (code)
13464 case GT_EXPR:
13465 arg1 = const_binop (PLUS_EXPR, arg1,
13466 build_int_cst (TREE_TYPE (arg1), 1));
13467 return fold_build2_loc (loc, EQ_EXPR, type,
13468 fold_convert_loc (loc,
13469 TREE_TYPE (arg1), arg0),
13470 arg1);
13471 case LE_EXPR:
13472 arg1 = const_binop (PLUS_EXPR, arg1,
13473 build_int_cst (TREE_TYPE (arg1), 1));
13474 return fold_build2_loc (loc, NE_EXPR, type,
13475 fold_convert_loc (loc, TREE_TYPE (arg1),
13476 arg0),
13477 arg1);
13478 default:
13479 break;
13481 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13482 == min_hi
13483 && TREE_INT_CST_LOW (arg1) == min_lo)
13484 switch (code)
13486 case LT_EXPR:
13487 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13489 case LE_EXPR:
13490 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13492 case GE_EXPR:
13493 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13495 case GT_EXPR:
13496 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13498 default:
13499 break;
13501 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13502 == min_hi
13503 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13504 switch (code)
13506 case GE_EXPR:
13507 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13508 return fold_build2_loc (loc, NE_EXPR, type,
13509 fold_convert_loc (loc,
13510 TREE_TYPE (arg1), arg0),
13511 arg1);
13512 case LT_EXPR:
13513 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13514 return fold_build2_loc (loc, EQ_EXPR, type,
13515 fold_convert_loc (loc, TREE_TYPE (arg1),
13516 arg0),
13517 arg1);
13518 default:
13519 break;
13522 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13523 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13524 && TYPE_UNSIGNED (arg1_type)
13525 /* We will flip the signedness of the comparison operator
13526 associated with the mode of arg1, so the sign bit is
13527 specified by this mode. Check that arg1 is the signed
13528 max associated with this sign bit. */
13529 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13530 /* signed_type does not work on pointer types. */
13531 && INTEGRAL_TYPE_P (arg1_type))
13533 /* The following case also applies to X < signed_max+1
13534 and X >= signed_max+1 because of previous transformations. */
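       /* For example, with 32-bit unsigned x, "x > 0x7fffffff" becomes
	  "(int) x < 0" and "x <= 0x7fffffff" becomes "(int) x >= 0",
	  testing the sign bit directly.  */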
13535 if (code == LE_EXPR || code == GT_EXPR)
13537 tree st;
13538 st = signed_type_for (TREE_TYPE (arg1));
13539 return fold_build2_loc (loc,
13540 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13541 type, fold_convert_loc (loc, st, arg0),
13542 build_int_cst (st, 0));
13548 /* If we are comparing an ABS_EXPR with a constant, we can
13549 convert all the cases into explicit comparisons, but they may
13550 well not be faster than doing the ABS and one comparison.
13551 But ABS (X) <= C is a range comparison, which becomes a subtraction
13552 and a comparison, and is probably faster. */
13553 if (code == LE_EXPR
13554 && TREE_CODE (arg1) == INTEGER_CST
13555 && TREE_CODE (arg0) == ABS_EXPR
13556 && ! TREE_SIDE_EFFECTS (arg0)
13557 && (0 != (tem = negate_expr (arg1)))
13558 && TREE_CODE (tem) == INTEGER_CST
13559 && !TREE_OVERFLOW (tem))
13560 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13561 build2 (GE_EXPR, type,
13562 TREE_OPERAND (arg0, 0), tem),
13563 build2 (LE_EXPR, type,
13564 TREE_OPERAND (arg0, 0), arg1));
13566 /* Convert ABS_EXPR<x> >= 0 to true. */
13567 strict_overflow_p = false;
13568 if (code == GE_EXPR
13569 && (integer_zerop (arg1)
13570 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13571 && real_zerop (arg1)))
13572 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13574 if (strict_overflow_p)
13575 fold_overflow_warning (("assuming signed overflow does not occur "
13576 "when simplifying comparison of "
13577 "absolute value and zero"),
13578 WARN_STRICT_OVERFLOW_CONDITIONAL);
13579 return omit_one_operand_loc (loc, type,
13580 constant_boolean_node (true, type),
13581 arg0);
13584 /* Convert ABS_EXPR<x> < 0 to false. */
13585 strict_overflow_p = false;
13586 if (code == LT_EXPR
13587 && (integer_zerop (arg1) || real_zerop (arg1))
13588 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13590 if (strict_overflow_p)
13591 fold_overflow_warning (("assuming signed overflow does not occur "
13592 "when simplifying comparison of "
13593 "absolute value and zero"),
13594 WARN_STRICT_OVERFLOW_CONDITIONAL);
13595 return omit_one_operand_loc (loc, type,
13596 constant_boolean_node (false, type),
13597 arg0);
13600 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13601 and similarly for >= into !=. */
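       /* For example, with unsigned x, "x < (1 << y)" becomes
	  "(x >> y) == 0" and "x >= (1 << y)" becomes "(x >> y) != 0",
	  avoiding materializing the shifted constant.  */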
13602 if ((code == LT_EXPR || code == GE_EXPR)
13603 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13604 && TREE_CODE (arg1) == LSHIFT_EXPR
13605 && integer_onep (TREE_OPERAND (arg1, 0)))
13606 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13607 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13608 TREE_OPERAND (arg1, 1)),
13609 build_zero_cst (TREE_TYPE (arg0)));
13611 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13612 otherwise Y might be >= # of bits in X's type and thus e.g.
13613 (unsigned char) (1 << Y) for Y == 15 might be 0.
13614 If the cast is widening, then 1 << Y should have unsigned type,
13615 otherwise if Y is the number of bits in the signed shift type minus 1,
13616 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13617 Y == 31 might be 0xffffffff80000000. */
13618 if ((code == LT_EXPR || code == GE_EXPR)
13619 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13620 && CONVERT_EXPR_P (arg1)
13621 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13622 && (TYPE_PRECISION (TREE_TYPE (arg1))
13623 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13624 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13625 || (TYPE_PRECISION (TREE_TYPE (arg1))
13626 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13627 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13629 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13630 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13631 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13632 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13633 build_zero_cst (TREE_TYPE (arg0)));
13636 return NULL_TREE;
13638 case UNORDERED_EXPR:
13639 case ORDERED_EXPR:
13640 case UNLT_EXPR:
13641 case UNLE_EXPR:
13642 case UNGT_EXPR:
13643 case UNGE_EXPR:
13644 case UNEQ_EXPR:
13645 case LTGT_EXPR:
13646 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13648 t1 = fold_relational_const (code, type, arg0, arg1);
13649 if (t1 != NULL_TREE)
13650 return t1;
13653 /* If the first operand is NaN, the result is constant. */
13654 if (TREE_CODE (arg0) == REAL_CST
13655 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13656 && (code != LTGT_EXPR || ! flag_trapping_math))
13658 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13659 ? integer_zero_node
13660 : integer_one_node;
13661 return omit_one_operand_loc (loc, type, t1, arg1);
13664 /* If the second operand is NaN, the result is constant. */
13665 if (TREE_CODE (arg1) == REAL_CST
13666 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13667 && (code != LTGT_EXPR || ! flag_trapping_math))
13669 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13670 ? integer_zero_node
13671 : integer_one_node;
13672 return omit_one_operand_loc (loc, type, t1, arg0);
13675 /* Simplify unordered comparison of something with itself. */
13676 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13677 && operand_equal_p (arg0, arg1, 0))
13678 return constant_boolean_node (1, type);
13680 if (code == LTGT_EXPR
13681 && !flag_trapping_math
13682 && operand_equal_p (arg0, arg1, 0))
13683 return constant_boolean_node (0, type);
13685 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13687 tree targ0 = strip_float_extensions (arg0);
13688 tree targ1 = strip_float_extensions (arg1);
13689 tree newtype = TREE_TYPE (targ0);
13691 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13692 newtype = TREE_TYPE (targ1);
13694 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13695 return fold_build2_loc (loc, code, type,
13696 fold_convert_loc (loc, newtype, targ0),
13697 fold_convert_loc (loc, newtype, targ1));
13700 return NULL_TREE;
13702 case COMPOUND_EXPR:
13703 /* When pedantic, a compound expression can be neither an lvalue
13704 nor an integer constant expression. */
13705 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13706 return NULL_TREE;
13707 /* Don't let (0, 0) be a null pointer constant. */
13708 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13709 : fold_convert_loc (loc, type, arg1);
13710 return pedantic_non_lvalue_loc (loc, tem);
13712 case COMPLEX_EXPR:
13713 if ((TREE_CODE (arg0) == REAL_CST
13714 && TREE_CODE (arg1) == REAL_CST)
13715 || (TREE_CODE (arg0) == INTEGER_CST
13716 && TREE_CODE (arg1) == INTEGER_CST))
13717 return build_complex (type, arg0, arg1);
13718 if (TREE_CODE (arg0) == REALPART_EXPR
13719 && TREE_CODE (arg1) == IMAGPART_EXPR
13720 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13721 && operand_equal_p (TREE_OPERAND (arg0, 0),
13722 TREE_OPERAND (arg1, 0), 0))
13723 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13724 TREE_OPERAND (arg1, 0));
13725 return NULL_TREE;
13727 case ASSERT_EXPR:
13728 /* An ASSERT_EXPR should never be passed to fold_binary. */
13729 gcc_unreachable ();
13731 case VEC_PACK_TRUNC_EXPR:
13732 case VEC_PACK_FIX_TRUNC_EXPR:
13734 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13735 tree *elts;
13737 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13738 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13739 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13740 return NULL_TREE;
13742 elts = XALLOCAVEC (tree, nelts);
13743 if (!vec_cst_ctor_to_array (arg0, elts)
13744 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13745 return NULL_TREE;
13747 for (i = 0; i < nelts; i++)
13749 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13750 ? NOP_EXPR : FIX_TRUNC_EXPR,
13751 TREE_TYPE (type), elts[i]);
13752 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13753 return NULL_TREE;
13756 return build_vector (type, elts);
13759 case VEC_WIDEN_MULT_LO_EXPR:
13760 case VEC_WIDEN_MULT_HI_EXPR:
13761 case VEC_WIDEN_MULT_EVEN_EXPR:
13762 case VEC_WIDEN_MULT_ODD_EXPR:
13764 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13765 unsigned int out, ofs, scale;
13766 tree *elts;
13768 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13769 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13770 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13771 return NULL_TREE;
13773 elts = XALLOCAVEC (tree, nelts * 4);
13774 if (!vec_cst_ctor_to_array (arg0, elts)
13775 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13776 return NULL_TREE;
13778 if (code == VEC_WIDEN_MULT_LO_EXPR)
13779 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13780 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13781 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13782 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13783 scale = 1, ofs = 0;
13784 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13785 scale = 1, ofs = 1;
13787 for (out = 0; out < nelts; out++)
13789 unsigned int in1 = (out << scale) + ofs;
13790 unsigned int in2 = in1 + nelts * 2;
13791 tree t1, t2;
13793 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13794 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13796 if (t1 == NULL_TREE || t2 == NULL_TREE)
13797 return NULL_TREE;
13798 elts[out] = const_binop (MULT_EXPR, t1, t2);
13799 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13800 return NULL_TREE;
13803 return build_vector (type, elts);
13806 default:
13807 return NULL_TREE;
13808 } /* switch (code) */
13811 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13812 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13813 of GOTO_EXPR. */
13815 static tree
13816 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13818 switch (TREE_CODE (*tp))
13820 case LABEL_EXPR:
13821 return *tp;
13823 case GOTO_EXPR:
13824 *walk_subtrees = 0;
13826 /* ... fall through ... */
13828 default:
13829 return NULL_TREE;
13833 /* Return whether the sub-tree ST contains a label which is accessible from
13834 outside the sub-tree. */
13836 static bool
13837 contains_label_p (tree st)
13839 return
13840 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13843 /* Fold a ternary expression of code CODE and type TYPE with operands
13844 OP0, OP1, and OP2. Return the folded expression if folding is
13845 successful. Otherwise, return NULL_TREE. */
13847 tree
13848 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13849 tree op0, tree op1, tree op2)
13851 tree tem;
13852 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13853 enum tree_code_class kind = TREE_CODE_CLASS (code);
13855 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13856 && TREE_CODE_LENGTH (code) == 3);
13858 /* Strip any conversions that don't change the mode. This is safe
13859 for every expression, except for a comparison expression because
13860 its signedness is derived from its operands. So, in the latter
13861 case, only strip conversions that don't change the signedness.
13863 Note that this is done as an internal manipulation within the
13864 constant folder, in order to find the simplest representation of
13865 the arguments so that their form can be studied. In any case,
13866 the appropriate type conversions should be put back in the tree
13867 that will get out of the constant folder. */
13868 if (op0)
13870 arg0 = op0;
13871 STRIP_NOPS (arg0);
13874 if (op1)
13876 arg1 = op1;
13877 STRIP_NOPS (arg1);
13880 if (op2)
13882 arg2 = op2;
13883 STRIP_NOPS (arg2);
13886 switch (code)
13888 case COMPONENT_REF:
13889 if (TREE_CODE (arg0) == CONSTRUCTOR
13890 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13892 unsigned HOST_WIDE_INT idx;
13893 tree field, value;
13894 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13895 if (field == arg1)
13896 return value;
13898 return NULL_TREE;
13900 case COND_EXPR:
13901 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13902 so all simple results must be passed through pedantic_non_lvalue. */
13903 if (TREE_CODE (arg0) == INTEGER_CST)
13905 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13906 tem = integer_zerop (arg0) ? op2 : op1;
13907 /* Only optimize constant conditions when the selected branch
13908 has the same type as the COND_EXPR. This avoids optimizing
13909 away "c ? x : throw", where the throw has a void type.
13910 Avoid throwing away the operand if it contains a label. */
13911 if ((!TREE_SIDE_EFFECTS (unused_op)
13912 || !contains_label_p (unused_op))
13913 && (! VOID_TYPE_P (TREE_TYPE (tem))
13914 || VOID_TYPE_P (type)))
13915 return pedantic_non_lvalue_loc (loc, tem);
13916 return NULL_TREE;
13918 if (operand_equal_p (arg1, op2, 0))
13919 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13921 /* If we have A op B ? A : C, we may be able to convert this to a
13922 simpler expression, depending on the operation and the values
13923 of B and C. Signed zeros prevent all of these transformations,
13924 for reasons given above each one.
13926 Also try swapping the arguments and inverting the conditional. */
13927 if (COMPARISON_CLASS_P (arg0)
13928 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13929 arg1, TREE_OPERAND (arg0, 1))
13930 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13932 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13933 if (tem)
13934 return tem;
13937 if (COMPARISON_CLASS_P (arg0)
13938 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13939 op2,
13940 TREE_OPERAND (arg0, 1))
13941 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13943 location_t loc0 = expr_location_or (arg0, loc);
13944 tem = fold_truth_not_expr (loc0, arg0);
13945 if (tem && COMPARISON_CLASS_P (tem))
13947 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13948 if (tem)
13949 return tem;
13953 /* If the second operand is simpler than the third, swap them
13954 since that produces better jump optimization results. */
13955 if (truth_value_p (TREE_CODE (arg0))
13956 && tree_swap_operands_p (op1, op2, false))
13958 location_t loc0 = expr_location_or (arg0, loc);
13959 /* See if this can be inverted. If it can't, possibly because
13960 it was a floating-point inequality comparison, don't do
13961 anything. */
13962 tem = fold_truth_not_expr (loc0, arg0);
13963 if (tem)
13964 return fold_build3_loc (loc, code, type, tem, op2, op1);
13967 /* Convert A ? 1 : 0 to simply A. */
13968 if (integer_onep (op1)
13969 && integer_zerop (op2)
13970 /* If we try to convert OP0 to our type, the
13971 call to fold will try to move the conversion inside
13972 a COND, which will recurse. In that case, the COND_EXPR
13973 is probably the best choice, so leave it alone. */
13974 && type == TREE_TYPE (arg0))
13975 return pedantic_non_lvalue_loc (loc, arg0);
13977 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13978 over COND_EXPR in cases such as floating point comparisons. */
13979 if (integer_zerop (op1)
13980 && integer_onep (op2)
13981 && truth_value_p (TREE_CODE (arg0)))
13982 return pedantic_non_lvalue_loc (loc,
13983 fold_convert_loc (loc, type,
13984 invert_truthvalue_loc (loc,
13985 arg0)));
13987 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13988 if (TREE_CODE (arg0) == LT_EXPR
13989 && integer_zerop (TREE_OPERAND (arg0, 1))
13990 && integer_zerop (op2)
13991 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13993 /* sign_bit_p only checks ARG1 bits within A's precision.
13994 If <sign bit of A> has wider type than A, bits outside
13995 of A's precision in <sign bit of A> need to be checked.
13996 If they are all 0, this optimization needs to be done
13997 in unsigned A's type; if they are all 1, in signed A's type;
13998 otherwise this can't be done. */
13999 if (TYPE_PRECISION (TREE_TYPE (tem))
14000 < TYPE_PRECISION (TREE_TYPE (arg1))
14001 && TYPE_PRECISION (TREE_TYPE (tem))
14002 < TYPE_PRECISION (type))
14004 unsigned HOST_WIDE_INT mask_lo;
14005 HOST_WIDE_INT mask_hi;
14006 int inner_width, outer_width;
14007 tree tem_type;
14009 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14010 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14011 if (outer_width > TYPE_PRECISION (type))
14012 outer_width = TYPE_PRECISION (type);
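/* Form a two-word mask whose set bits are exactly bits
   INNER_WIDTH .. OUTER_WIDTH-1 of ARG1, i.e. the bits beyond A's
   precision that sign_bit_p did not examine.  */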
14014 if (outer_width > HOST_BITS_PER_WIDE_INT)
14016 mask_hi = ((unsigned HOST_WIDE_INT) -1
14017 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14018 mask_lo = -1;
14020 else
14022 mask_hi = 0;
14023 mask_lo = ((unsigned HOST_WIDE_INT) -1
14024 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14026 if (inner_width > HOST_BITS_PER_WIDE_INT)
14028 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
14029 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14030 mask_lo = 0;
14032 else
14033 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
14034 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14036 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14037 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14039 tem_type = signed_type_for (TREE_TYPE (tem));
14040 tem = fold_convert_loc (loc, tem_type, tem);
14042 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14043 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14045 tem_type = unsigned_type_for (TREE_TYPE (tem));
14046 tem = fold_convert_loc (loc, tem_type, tem);
14048 else
14049 tem = NULL;
14052 if (tem)
14053 return
14054 fold_convert_loc (loc, type,
14055 fold_build2_loc (loc, BIT_AND_EXPR,
14056 TREE_TYPE (tem), tem,
14057 fold_convert_loc (loc,
14058 TREE_TYPE (tem),
14059 arg1)));
14062 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14063 already handled above. */
14064 if (TREE_CODE (arg0) == BIT_AND_EXPR
14065 && integer_onep (TREE_OPERAND (arg0, 1))
14066 && integer_zerop (op2)
14067 && integer_pow2p (arg1))
14069 tree tem = TREE_OPERAND (arg0, 0);
14070 STRIP_NOPS (tem);
14071 if (TREE_CODE (tem) == RSHIFT_EXPR
14072 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14073 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14074 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14075 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14076 TREE_OPERAND (tem, 0), arg1);
14079 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14080 is probably obsolete because the first operand should be a
14081 truth value (that's why we have the two cases above), but let's
14082 leave it in until we can confirm this for all front-ends. */
14083 if (integer_zerop (op2)
14084 && TREE_CODE (arg0) == NE_EXPR
14085 && integer_zerop (TREE_OPERAND (arg0, 1))
14086 && integer_pow2p (arg1)
14087 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14088 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14089 arg1, OEP_ONLY_CONST))
14090 return pedantic_non_lvalue_loc (loc,
14091 fold_convert_loc (loc, type,
14092 TREE_OPERAND (arg0, 0)));
14094 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14095 if (integer_zerop (op2)
14096 && truth_value_p (TREE_CODE (arg0))
14097 && truth_value_p (TREE_CODE (arg1)))
14098 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14099 fold_convert_loc (loc, type, arg0),
14100 arg1);
14102 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14103 if (integer_onep (op2)
14104 && truth_value_p (TREE_CODE (arg0))
14105 && truth_value_p (TREE_CODE (arg1)))
14107 location_t loc0 = expr_location_or (arg0, loc);
14108 /* Only perform the transformation if ARG0 is easily inverted. */
14109 tem = fold_truth_not_expr (loc0, arg0);
14110 if (tem)
14111 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14112 fold_convert_loc (loc, type, tem),
14113 arg1);
14116 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14117 if (integer_zerop (arg1)
14118 && truth_value_p (TREE_CODE (arg0))
14119 && truth_value_p (TREE_CODE (op2)))
14121 location_t loc0 = expr_location_or (arg0, loc);
14122 /* Only perform the transformation if ARG0 is easily inverted. */
14123 tem = fold_truth_not_expr (loc0, arg0);
14124 if (tem)
14125 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14126 fold_convert_loc (loc, type, tem),
14127 op2);
14130 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14131 if (integer_onep (arg1)
14132 && truth_value_p (TREE_CODE (arg0))
14133 && truth_value_p (TREE_CODE (op2)))
14134 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14135 fold_convert_loc (loc, type, arg0),
14136 op2);
14138 return NULL_TREE;
14140 case VEC_COND_EXPR:
14141 if (TREE_CODE (arg0) == VECTOR_CST)
14143 if (integer_all_onesp (arg0) && !TREE_SIDE_EFFECTS (op2))
14144 return pedantic_non_lvalue_loc (loc, op1);
14145 if (integer_zerop (arg0) && !TREE_SIDE_EFFECTS (op1))
14146 return pedantic_non_lvalue_loc (loc, op2);
14148 return NULL_TREE;
14150 case CALL_EXPR:
14151 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14152 of fold_ternary on them. */
14153 gcc_unreachable ();
14155 case BIT_FIELD_REF:
14156 if ((TREE_CODE (arg0) == VECTOR_CST
14157 || (TREE_CODE (arg0) == CONSTRUCTOR
14158 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14159 && (type == TREE_TYPE (TREE_TYPE (arg0))
14160 || (TREE_CODE (type) == VECTOR_TYPE
14161 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14163 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14164 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14165 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14166 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14168 if (n != 0
14169 && (idx % width) == 0
14170 && (n % width) == 0
14171 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14173 idx = idx / width;
14174 n = n / width;
14176 if (TREE_CODE (arg0) == VECTOR_CST)
14178 if (n == 1)
14179 return VECTOR_CST_ELT (arg0, idx);
14181 tree *vals = XALLOCAVEC (tree, n);
14182 for (unsigned i = 0; i < n; ++i)
14183 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14184 return build_vector (type, vals);
14187 /* Constructor elements can be subvectors. */
14188 unsigned HOST_WIDE_INT k = 1;
14189 if (CONSTRUCTOR_NELTS (arg0) != 0)
14191 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14192 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14193 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14196 /* We keep an exact subset of the constructor elements. */
14197 if ((idx % k) == 0 && (n % k) == 0)
14199 if (CONSTRUCTOR_NELTS (arg0) == 0)
14200 return build_constructor (type, NULL);
14201 idx /= k;
14202 n /= k;
14203 if (n == 1)
14205 if (idx < CONSTRUCTOR_NELTS (arg0))
14206 return CONSTRUCTOR_ELT (arg0, idx)->value;
14207 return build_zero_cst (type);
14210 vec<constructor_elt, va_gc> *vals;
14211 vec_alloc (vals, n);
14212 for (unsigned i = 0;
14213 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14214 ++i)
14215 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14216 CONSTRUCTOR_ELT
14217 (arg0, idx + i)->value);
14218 return build_constructor (type, vals);
14220 /* The bitfield references a single constructor element. */
14221 else if (idx + n <= (idx / k + 1) * k)
14223 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14224 return build_zero_cst (type);
14225 else if (n == k)
14226 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14227 else
14228 return fold_build3_loc (loc, code, type,
14229 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14230 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14235 /* A bit-field-ref that references the full argument can be stripped. */
14236 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14237 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14238 && integer_zerop (op2))
14239 return fold_convert_loc (loc, type, arg0);
14241 /* On constants we can use native encode/interpret to constant
14242 fold (nearly) all BIT_FIELD_REFs. */
14243 if (CONSTANT_CLASS_P (arg0)
14244 && can_native_interpret_type_p (type)
14245 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14246 /* This limitation should not be necessary, we just need to
14247 round this up to mode size. */
14248 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14249 /* Need bit-shifting of the buffer to relax the following. */
14250 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14252 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14253 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14254 unsigned HOST_WIDE_INT clen;
14255 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14256 /* ??? We cannot tell native_encode_expr to start at
14257 an arbitrary byte, so limit ourselves to a reasonable amount
14258 of work. */
14259 if (clen <= 4096)
14261 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14262 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14263 if (len > 0
14264 && len * BITS_PER_UNIT >= bitpos + bitsize)
14266 tree v = native_interpret_expr (type,
14267 b + bitpos / BITS_PER_UNIT,
14268 bitsize / BITS_PER_UNIT);
14269 if (v)
14270 return v;
14275 return NULL_TREE;
14277 case FMA_EXPR:
14278 /* For integers we can decompose the FMA if possible. */
14279 if (TREE_CODE (arg0) == INTEGER_CST
14280 && TREE_CODE (arg1) == INTEGER_CST)
14281 return fold_build2_loc (loc, PLUS_EXPR, type,
14282 const_binop (MULT_EXPR, arg0, arg1), arg2);
14283 if (integer_zerop (arg2))
14284 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14286 return fold_fma (loc, type, arg0, arg1, arg2);
14288 case VEC_PERM_EXPR:
14289 if (TREE_CODE (arg2) == VECTOR_CST)
14291 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14292 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14293 tree t;
14294 bool need_mask_canon = false;
14295 bool all_in_vec0 = true;
14296 bool all_in_vec1 = true;
14297 bool maybe_identity = true;
14298 bool single_arg = (op0 == op1);
14299 bool changed = false;
14301 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14302 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14303 for (i = 0; i < nelts; i++)
14305 tree val = VECTOR_CST_ELT (arg2, i);
14306 if (TREE_CODE (val) != INTEGER_CST)
14307 return NULL_TREE;
14309 sel[i] = TREE_INT_CST_LOW (val) & mask;
14310 if (TREE_INT_CST_HIGH (val)
14311 || ((unsigned HOST_WIDE_INT)
14312 TREE_INT_CST_LOW (val) != sel[i]))
14313 need_mask_canon = true;
14315 if (sel[i] < nelts)
14316 all_in_vec1 = false;
14317 else
14318 all_in_vec0 = false;
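/* The mask is an identity selection if every element selects the
   value at its own index, modulo NELTS (vector lengths are powers
   of two, so the AND with NELTS-1 computes the modulus).  */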
14320 if ((sel[i] & (nelts-1)) != i)
14321 maybe_identity = false;
14324 if (maybe_identity)
14326 if (all_in_vec0)
14327 return op0;
14328 if (all_in_vec1)
14329 return op1;
14332 if (all_in_vec0)
14333 op1 = op0;
14334 else if (all_in_vec1)
14336 op0 = op1;
14337 for (i = 0; i < nelts; i++)
14338 sel[i] -= nelts;
14339 need_mask_canon = true;
14342 if ((TREE_CODE (op0) == VECTOR_CST
14343 || TREE_CODE (op0) == CONSTRUCTOR)
14344 && (TREE_CODE (op1) == VECTOR_CST
14345 || TREE_CODE (op1) == CONSTRUCTOR))
14347 t = fold_vec_perm (type, op0, op1, sel);
14348 if (t != NULL_TREE)
14349 return t;
14352 if (op0 == op1 && !single_arg)
14353 changed = true;
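/* If any selector value was canonicalized above, rebuild the
   constant mask vector OP2 from the reduced selector values.  */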
14355 if (need_mask_canon && arg2 == op2)
14357 tree *tsel = XALLOCAVEC (tree, nelts);
14358 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14359 for (i = 0; i < nelts; i++)
14360 tsel[i] = build_int_cst (eltype, sel[i]);
14361 op2 = build_vector (TREE_TYPE (arg2), tsel);
14362 changed = true;
14365 if (changed)
14366 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14368 return NULL_TREE;
14370 default:
14371 return NULL_TREE;
14372 } /* switch (code) */
14375 /* Perform constant folding and related simplification of EXPR.
14376 The related simplifications include x*1 => x, x*0 => 0, etc.,
14377 and application of the associative law.
14378 NOP_EXPR conversions may be removed freely (as long as we
14379 are careful not to change the type of the overall expression).
14380 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14381 but we can constant-fold them if they have constant operands. */
14383 #ifdef ENABLE_FOLD_CHECKING
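/* Under fold checking, the definition below is compiled as the
   static function fold_1; the checking wrapper named fold, defined
   further down, checksums EXPR around the call to fold_1.  */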
14384 # define fold(x) fold_1 (x)
14385 static tree fold_1 (tree);
14386 static
14387 #endif
14388 tree
14389 fold (tree expr)
14391 const tree t = expr;
14392 enum tree_code code = TREE_CODE (t);
14393 enum tree_code_class kind = TREE_CODE_CLASS (code);
14394 tree tem;
14395 location_t loc = EXPR_LOCATION (expr);
14397 /* Return right away if a constant. */
14398 if (kind == tcc_constant)
14399 return t;
14401 /* CALL_EXPR-like objects with variable numbers of operands are
14402 treated specially. */
14403 if (kind == tcc_vl_exp)
14405 if (code == CALL_EXPR)
14407 tem = fold_call_expr (loc, expr, false);
14408 return tem ? tem : expr;
14410 return expr;
14413 if (IS_EXPR_CODE_CLASS (kind))
14415 tree type = TREE_TYPE (t);
14416 tree op0, op1, op2;
14418 switch (TREE_CODE_LENGTH (code))
14420 case 1:
14421 op0 = TREE_OPERAND (t, 0);
14422 tem = fold_unary_loc (loc, code, type, op0);
14423 return tem ? tem : expr;
14424 case 2:
14425 op0 = TREE_OPERAND (t, 0);
14426 op1 = TREE_OPERAND (t, 1);
14427 tem = fold_binary_loc (loc, code, type, op0, op1);
14428 return tem ? tem : expr;
14429 case 3:
14430 op0 = TREE_OPERAND (t, 0);
14431 op1 = TREE_OPERAND (t, 1);
14432 op2 = TREE_OPERAND (t, 2);
14433 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14434 return tem ? tem : expr;
14435 default:
14436 break;
14440 switch (code)
14442 case ARRAY_REF:
14444 tree op0 = TREE_OPERAND (t, 0);
14445 tree op1 = TREE_OPERAND (t, 1);
14447 if (TREE_CODE (op1) == INTEGER_CST
14448 && TREE_CODE (op0) == CONSTRUCTOR
14449 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14451 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14452 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14453 unsigned HOST_WIDE_INT begin = 0;
14455 /* Find a matching index by means of a binary search. */
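/* The search relies on the element list being sorted by
   (possibly RANGE_EXPR) index, the order in which array
   constructors are kept.  */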
14456 while (begin != end)
14458 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14459 tree index = (*elts)[middle].index;
14461 if (TREE_CODE (index) == INTEGER_CST
14462 && tree_int_cst_lt (index, op1))
14463 begin = middle + 1;
14464 else if (TREE_CODE (index) == INTEGER_CST
14465 && tree_int_cst_lt (op1, index))
14466 end = middle;
14467 else if (TREE_CODE (index) == RANGE_EXPR
14468 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14469 begin = middle + 1;
14470 else if (TREE_CODE (index) == RANGE_EXPR
14471 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14472 end = middle;
14473 else
14474 return (*elts)[middle].value;
14478 return t;
14481 /* Return a VECTOR_CST if possible. */
14482 case CONSTRUCTOR:
14484 tree type = TREE_TYPE (t);
14485 if (TREE_CODE (type) != VECTOR_TYPE)
14486 return t;
14488 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14489 unsigned HOST_WIDE_INT idx, pos = 0;
14490 tree value;
14492 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14494 if (!CONSTANT_CLASS_P (value))
14495 return t;
14496 if (TREE_CODE (value) == VECTOR_CST)
14498 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14499 vec[pos++] = VECTOR_CST_ELT (value, i);
14501 else
14502 vec[pos++] = value;
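/* Elements omitted from the constructor are implicitly zero.  */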
14504 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14505 vec[pos] = build_zero_cst (TREE_TYPE (type));
14507 return build_vector (type, vec);
14510 case CONST_DECL:
14511 return fold (DECL_INITIAL (t));
14513 default:
14514 return t;
14515 } /* switch (code) */
14518 #ifdef ENABLE_FOLD_CHECKING
14519 #undef fold
14521 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14522 hash_table <pointer_hash <tree_node> >);
14523 static void fold_check_failed (const_tree, const_tree);
14524 void print_fold_checksum (const_tree);
14526 /* When --enable-checking=fold is in effect, compute a digest of EXPR
14527 before and after the actual fold call, to verify that fold did not
14528 accidentally change the original expr. */
14530 tree
14531 fold (tree expr)
14533 tree ret;
14534 struct md5_ctx ctx;
14535 unsigned char checksum_before[16], checksum_after[16];
14536 hash_table <pointer_hash <tree_node> > ht;
14538 ht.create (32);
14539 md5_init_ctx (&ctx);
14540 fold_checksum_tree (expr, &ctx, ht);
14541 md5_finish_ctx (&ctx, checksum_before);
14542 ht.empty ();
14544 ret = fold_1 (expr);
14546 md5_init_ctx (&ctx);
14547 fold_checksum_tree (expr, &ctx, ht);
14548 md5_finish_ctx (&ctx, checksum_after);
14549 ht.dispose ();
14551 if (memcmp (checksum_before, checksum_after, 16))
14552 fold_check_failed (expr, ret);
14554 return ret;
14557 void
14558 print_fold_checksum (const_tree expr)
14560 struct md5_ctx ctx;
14561 unsigned char checksum[16], cnt;
14562 hash_table <pointer_hash <tree_node> > ht;
14564 ht.create (32);
14565 md5_init_ctx (&ctx);
14566 fold_checksum_tree (expr, &ctx, ht);
14567 md5_finish_ctx (&ctx, checksum);
14568 ht.dispose ();
14569 for (cnt = 0; cnt < 16; ++cnt)
14570 fprintf (stderr, "%02x", checksum[cnt]);
14571 putc ('\n', stderr);
14574 static void
14575 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14577 internal_error ("fold check: original tree changed by fold");
14580 static void
14581 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14582 hash_table <pointer_hash <tree_node> > ht)
14584 tree_node **slot;
14585 enum tree_code code;
14586 union tree_node buf;
14587 int i, len;
14589 recursive_label:
14590 if (expr == NULL)
14591 return;
14592 slot = ht.find_slot (expr, INSERT);
14593 if (*slot != NULL)
14594 return;
14595 *slot = CONST_CAST_TREE (expr);
14596 code = TREE_CODE (expr);
14597 if (TREE_CODE_CLASS (code) == tcc_declaration
14598 && DECL_ASSEMBLER_NAME_SET_P (expr))
14600 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14601 memcpy ((char *) &buf, expr, tree_size (expr));
14602 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14603 expr = (tree) &buf;
14605 else if (TREE_CODE_CLASS (code) == tcc_type
14606 && (TYPE_POINTER_TO (expr)
14607 || TYPE_REFERENCE_TO (expr)
14608 || TYPE_CACHED_VALUES_P (expr)
14609 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14610 || TYPE_NEXT_VARIANT (expr)))
14612 /* Allow these fields to be modified. */
14613 tree tmp;
14614 memcpy ((char *) &buf, expr, tree_size (expr));
14615 expr = tmp = (tree) &buf;
14616 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14617 TYPE_POINTER_TO (tmp) = NULL;
14618 TYPE_REFERENCE_TO (tmp) = NULL;
14619 TYPE_NEXT_VARIANT (tmp) = NULL;
14620 if (TYPE_CACHED_VALUES_P (tmp))
14622 TYPE_CACHED_VALUES_P (tmp) = 0;
14623 TYPE_CACHED_VALUES (tmp) = NULL;
14626 md5_process_bytes (expr, tree_size (expr), ctx);
14627 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14628 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14629 if (TREE_CODE_CLASS (code) != tcc_type
14630 && TREE_CODE_CLASS (code) != tcc_declaration
14631 && code != TREE_LIST
14632 && code != SSA_NAME
14633 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14634 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14635 switch (TREE_CODE_CLASS (code))
14637 case tcc_constant:
14638 switch (code)
14640 case STRING_CST:
14641 md5_process_bytes (TREE_STRING_POINTER (expr),
14642 TREE_STRING_LENGTH (expr), ctx);
14643 break;
14644 case COMPLEX_CST:
14645 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14646 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14647 break;
14648 case VECTOR_CST:
14649 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14650 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14651 break;
14652 default:
14653 break;
14655 break;
14656 case tcc_exceptional:
14657 switch (code)
14659 case TREE_LIST:
14660 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14661 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14662 expr = TREE_CHAIN (expr);
14663 goto recursive_label;
14664 break;
14665 case TREE_VEC:
14666 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14667 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14668 break;
14669 default:
14670 break;
14672 break;
14673 case tcc_expression:
14674 case tcc_reference:
14675 case tcc_comparison:
14676 case tcc_unary:
14677 case tcc_binary:
14678 case tcc_statement:
14679 case tcc_vl_exp:
14680 len = TREE_OPERAND_LENGTH (expr);
14681 for (i = 0; i < len; ++i)
14682 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14683 break;
14684 case tcc_declaration:
14685 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14686 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14687 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14689 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14690 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14691 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14692 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14693 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14695 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14696 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14698 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14700 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14701 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14702 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14704 break;
14705 case tcc_type:
14706 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14707 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14708 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14709 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14710 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14711 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14712 if (INTEGRAL_TYPE_P (expr)
14713 || SCALAR_FLOAT_TYPE_P (expr))
14715 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14716 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14718 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14719 if (TREE_CODE (expr) == RECORD_TYPE
14720 || TREE_CODE (expr) == UNION_TYPE
14721 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14722 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14723 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14724 break;
14725 default:
14726 break;
14730 /* Helper function for outputting the checksum of a tree T. When
14731 debugging with gdb, you can "define mynext" to be "next" followed
14732 by "call debug_fold_checksum (op0)", then just trace down till the
14733 outputs differ. */
14735 DEBUG_FUNCTION void
14736 debug_fold_checksum (const_tree t)
14738 int i;
14739 unsigned char checksum[16];
14740 struct md5_ctx ctx;
14741 hash_table <pointer_hash <tree_node> > ht;
14742 ht.create (32);
14744 md5_init_ctx (&ctx);
14745 fold_checksum_tree (t, &ctx, ht);
14746 md5_finish_ctx (&ctx, checksum);
14747 ht.empty ();
14749 for (i = 0; i < 16; i++)
14750 fprintf (stderr, "%d ", checksum[i]);
14752 fprintf (stderr, "\n");
14755 #endif
14757 /* Fold a unary tree expression with code CODE of type TYPE with an
14758 operand OP0. LOC is the location of the resulting expression.
14759 Return a folded expression if successful. Otherwise, return a tree
14760 expression with code CODE of type TYPE with an operand OP0. */
14762 tree
14763 fold_build1_stat_loc (location_t loc,
14764 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14766 tree tem;
14767 #ifdef ENABLE_FOLD_CHECKING
14768 unsigned char checksum_before[16], checksum_after[16];
14769 struct md5_ctx ctx;
14770 hash_table <pointer_hash <tree_node> > ht;
14772 ht.create (32);
14773 md5_init_ctx (&ctx);
14774 fold_checksum_tree (op0, &ctx, ht);
14775 md5_finish_ctx (&ctx, checksum_before);
14776 ht.empty ();
14777 #endif
14779 tem = fold_unary_loc (loc, code, type, op0);
14780 if (!tem)
14781 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14783 #ifdef ENABLE_FOLD_CHECKING
14784 md5_init_ctx (&ctx);
14785 fold_checksum_tree (op0, &ctx, ht);
14786 md5_finish_ctx (&ctx, checksum_after);
14787 ht.dispose ();
14789 if (memcmp (checksum_before, checksum_after, 16))
14790 fold_check_failed (op0, tem);
14791 #endif
14792 return tem;
14795 /* Fold a binary tree expression with code CODE of type TYPE with
14796 operands OP0 and OP1. LOC is the location of the resulting
14797 expression. Return a folded expression if successful. Otherwise,
14798 return a tree expression with code CODE of type TYPE with operands
14799 OP0 and OP1. */
14801 tree
14802 fold_build2_stat_loc (location_t loc,
14803 enum tree_code code, tree type, tree op0, tree op1
14804 MEM_STAT_DECL)
14806 tree tem;
14807 #ifdef ENABLE_FOLD_CHECKING
14808 unsigned char checksum_before_op0[16],
14809 checksum_before_op1[16],
14810 checksum_after_op0[16],
14811 checksum_after_op1[16];
14812 struct md5_ctx ctx;
14813 hash_table <pointer_hash <tree_node> > ht;
14815 ht.create (32);
14816 md5_init_ctx (&ctx);
14817 fold_checksum_tree (op0, &ctx, ht);
14818 md5_finish_ctx (&ctx, checksum_before_op0);
14819 ht.empty ();
14821 md5_init_ctx (&ctx);
14822 fold_checksum_tree (op1, &ctx, ht);
14823 md5_finish_ctx (&ctx, checksum_before_op1);
14824 ht.empty ();
14825 #endif
14827 tem = fold_binary_loc (loc, code, type, op0, op1);
14828 if (!tem)
14829 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14831 #ifdef ENABLE_FOLD_CHECKING
14832 md5_init_ctx (&ctx);
14833 fold_checksum_tree (op0, &ctx, ht);
14834 md5_finish_ctx (&ctx, checksum_after_op0);
14835 ht.empty ();
14837 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14838 fold_check_failed (op0, tem);
14840 md5_init_ctx (&ctx);
14841 fold_checksum_tree (op1, &ctx, ht);
14842 md5_finish_ctx (&ctx, checksum_after_op1);
14843 ht.dispose ();
14845 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14846 fold_check_failed (op1, tem);
14847 #endif
14848 return tem;
14851 /* Fold a ternary tree expression with code CODE of type TYPE with
14852 operands OP0, OP1, and OP2. Return a folded expression if
14853 successful. Otherwise, return a tree expression with code CODE of
14854 type TYPE with operands OP0, OP1, and OP2. */
14856 tree
14857 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14858 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14860 tree tem;
14861 #ifdef ENABLE_FOLD_CHECKING
14862 unsigned char checksum_before_op0[16],
14863 checksum_before_op1[16],
14864 checksum_before_op2[16],
14865 checksum_after_op0[16],
14866 checksum_after_op1[16],
14867 checksum_after_op2[16];
14868 struct md5_ctx ctx;
14869 hash_table <pointer_hash <tree_node> > ht;
14871 ht.create (32);
14872 md5_init_ctx (&ctx);
14873 fold_checksum_tree (op0, &ctx, ht);
14874 md5_finish_ctx (&ctx, checksum_before_op0);
14875 ht.empty ();
14877 md5_init_ctx (&ctx);
14878 fold_checksum_tree (op1, &ctx, ht);
14879 md5_finish_ctx (&ctx, checksum_before_op1);
14880 ht.empty ();
14882 md5_init_ctx (&ctx);
14883 fold_checksum_tree (op2, &ctx, ht);
14884 md5_finish_ctx (&ctx, checksum_before_op2);
14885 ht.empty ();
14886 #endif
14888 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14889 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14890 if (!tem)
14891 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14893 #ifdef ENABLE_FOLD_CHECKING
14894 md5_init_ctx (&ctx);
14895 fold_checksum_tree (op0, &ctx, ht);
14896 md5_finish_ctx (&ctx, checksum_after_op0);
14897 ht.empty ();
14899 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14900 fold_check_failed (op0, tem);
14902 md5_init_ctx (&ctx);
14903 fold_checksum_tree (op1, &ctx, ht);
14904 md5_finish_ctx (&ctx, checksum_after_op1);
14905 ht.empty ();
14907 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14908 fold_check_failed (op1, tem);
14910 md5_init_ctx (&ctx);
14911 fold_checksum_tree (op2, &ctx, ht);
14912 md5_finish_ctx (&ctx, checksum_after_op2);
14913 ht.dispose ();
14915 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14916 fold_check_failed (op2, tem);
14917 #endif
14918 return tem;
14921 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
14922 arguments in ARGARRAY, and a null static chain.
14923 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14924 of type TYPE from the given operands as constructed by build_call_array. */
14926 tree
14927 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14928 int nargs, tree *argarray)
14930 tree tem;
14931 #ifdef ENABLE_FOLD_CHECKING
14932 unsigned char checksum_before_fn[16],
14933 checksum_before_arglist[16],
14934 checksum_after_fn[16],
14935 checksum_after_arglist[16];
14936 struct md5_ctx ctx;
14937 hash_table <pointer_hash <tree_node> > ht;
14938 int i;
14940 ht.create (32);
14941 md5_init_ctx (&ctx);
14942 fold_checksum_tree (fn, &ctx, ht);
14943 md5_finish_ctx (&ctx, checksum_before_fn);
14944 ht.empty ();
14946 md5_init_ctx (&ctx);
14947 for (i = 0; i < nargs; i++)
14948 fold_checksum_tree (argarray[i], &ctx, ht);
14949 md5_finish_ctx (&ctx, checksum_before_arglist);
14950 ht.empty ();
14951 #endif
14953 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14955 #ifdef ENABLE_FOLD_CHECKING
14956 md5_init_ctx (&ctx);
14957 fold_checksum_tree (fn, &ctx, ht);
14958 md5_finish_ctx (&ctx, checksum_after_fn);
14959 ht.empty ();
14961 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14962 fold_check_failed (fn, tem);
14964 md5_init_ctx (&ctx);
14965 for (i = 0; i < nargs; i++)
14966 fold_checksum_tree (argarray[i], &ctx, ht);
14967 md5_finish_ctx (&ctx, checksum_after_arglist);
14968 ht.dispose ();
14970 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14971 fold_check_failed (NULL_TREE, tem);
14972 #endif
14973 return tem;
14976 /* Perform constant folding and related simplification of initializer
14977 expression EXPR. These behave identically to "fold_buildN" but ignore
14978 potential run-time traps and exceptions that fold must preserve. */
14980 #define START_FOLD_INIT \
14981 int saved_signaling_nans = flag_signaling_nans;\
14982 int saved_trapping_math = flag_trapping_math;\
14983 int saved_rounding_math = flag_rounding_math;\
14984 int saved_trapv = flag_trapv;\
14985 int saved_folding_initializer = folding_initializer;\
14986 flag_signaling_nans = 0;\
14987 flag_trapping_math = 0;\
14988 flag_rounding_math = 0;\
14989 flag_trapv = 0;\
14990 folding_initializer = 1;
14992 #define END_FOLD_INIT \
14993 flag_signaling_nans = saved_signaling_nans;\
14994 flag_trapping_math = saved_trapping_math;\
14995 flag_rounding_math = saved_rounding_math;\
14996 flag_trapv = saved_trapv;\
14997 folding_initializer = saved_folding_initializer;
14999 tree
15000 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15001 tree type, tree op)
15003 tree result;
15004 START_FOLD_INIT;
15006 result = fold_build1_loc (loc, code, type, op);
15008 END_FOLD_INIT;
15009 return result;
15012 tree
15013 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15014 tree type, tree op0, tree op1)
15016 tree result;
15017 START_FOLD_INIT;
15019 result = fold_build2_loc (loc, code, type, op0, op1);
15021 END_FOLD_INIT;
15022 return result;
15025 tree
15026 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15027 tree type, tree op0, tree op1, tree op2)
15029 tree result;
15030 START_FOLD_INIT;
15032 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15034 END_FOLD_INIT;
15035 return result;
15038 tree
15039 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15040 int nargs, tree *argarray)
15042 tree result;
15043 START_FOLD_INIT;
15045 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15047 END_FOLD_INIT;
15048 return result;
15051 #undef START_FOLD_INIT
15052 #undef END_FOLD_INIT
15054 /* Determine if first argument is a multiple of second argument. Return 0 if
15055 it is not, or if we cannot easily determine that it is.
15057 An example of the sort of thing we care about (at this point; this routine
15058 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15059 fold cases do now) is discovering that
15061 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15063 is a multiple of
15065 SAVE_EXPR (J * 8)
15067 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15069 This code also handles discovering that
15071 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15073 is a multiple of 8 so we don't have to worry about dealing with a
15074 possible remainder.
15076 Note that we *look* inside a SAVE_EXPR only to determine how it was
15077 calculated; it is not safe for fold to do much of anything else with the
15078 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15079 at run time. For example, the latter example above *cannot* be implemented
15080 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15081 evaluation time of the original SAVE_EXPR is not necessarily the same at
15082 the time the new expression is evaluated. The only optimization of this
15083 sort that would be valid is changing
15085 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15087 divided by 8 to
15089 SAVE_EXPR (I) * SAVE_EXPR (J)
15091 (where the same SAVE_EXPR (J) is used in the original and the
15092 transformed version). */
15094 int
15095 multiple_of_p (tree type, const_tree top, const_tree bottom)
15097 if (operand_equal_p (top, bottom, 0))
15098 return 1;
15100 if (TREE_CODE (type) != INTEGER_TYPE)
15101 return 0;
15103 switch (TREE_CODE (top))
15105 case BIT_AND_EXPR:
15106 /* Bitwise and provides a power of two multiple. If the mask is
15107 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15108 if (!integer_pow2p (bottom))
15109 return 0;
15110 /* FALLTHRU */
15112 case MULT_EXPR:
15113 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15114 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15116 case PLUS_EXPR:
15117 case MINUS_EXPR:
15118 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15119 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15121 case LSHIFT_EXPR:
15122 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15124 tree op1, t1;
15126 op1 = TREE_OPERAND (top, 1);
15127 /* const_binop may not detect overflow correctly,
15128 so check for it explicitly here. */
15129 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15130 > TREE_INT_CST_LOW (op1)
15131 && TREE_INT_CST_HIGH (op1) == 0
15132 && 0 != (t1 = fold_convert (type,
15133 const_binop (LSHIFT_EXPR,
15134 size_one_node,
15135 op1)))
15136 && !TREE_OVERFLOW (t1))
15137 return multiple_of_p (type, t1, bottom);
15139 return 0;
15141 case NOP_EXPR:
15142 /* Can't handle conversions from a non-integral or wider integral type. */
15143 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15144 || (TYPE_PRECISION (type)
15145 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15146 return 0;
15148 /* ... fall through ... */
15150 case SAVE_EXPR:
15151 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15153 case COND_EXPR:
15154 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15155 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15157 case INTEGER_CST:
15158 if (TREE_CODE (bottom) != INTEGER_CST
15159 || integer_zerop (bottom)
15160 || (TYPE_UNSIGNED (type)
15161 && (tree_int_cst_sgn (top) < 0
15162 || tree_int_cst_sgn (bottom) < 0)))
15163 return 0;
15164 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15165 top, bottom));
15167 default:
15168 return 0;
15172 /* Return true if an expression with code CODE and type TYPE is known to be non-negative. */
15174 static bool
15175 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15177 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15178 && truth_value_p (code))
15179 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15180 have a signed:1 type (where the values are -1 and 0). */
15181 return true;
15182 return false;
15185 /* Return true if (CODE OP0) is known to be non-negative. If the return
15186 value is based on the assumption that signed overflow is undefined,
15187 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15188 *STRICT_OVERFLOW_P. */
15190 bool
15191 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15192 bool *strict_overflow_p)
15194 if (TYPE_UNSIGNED (type))
15195 return true;
15197 switch (code)
15199 case ABS_EXPR:
15200 /* We can't return 1 if flag_wrapv is set because
15201 ABS_EXPR<INT_MIN> = INT_MIN. */
15202 if (!INTEGRAL_TYPE_P (type))
15203 return true;
15204 if (TYPE_OVERFLOW_UNDEFINED (type))
15206 *strict_overflow_p = true;
15207 return true;
15209 break;
15211 case NON_LVALUE_EXPR:
15212 case FLOAT_EXPR:
15213 case FIX_TRUNC_EXPR:
15214 return tree_expr_nonnegative_warnv_p (op0,
15215 strict_overflow_p);
15217 case NOP_EXPR:
15219 tree inner_type = TREE_TYPE (op0);
15220 tree outer_type = type;
15222 if (TREE_CODE (outer_type) == REAL_TYPE)
15224 if (TREE_CODE (inner_type) == REAL_TYPE)
15225 return tree_expr_nonnegative_warnv_p (op0,
15226 strict_overflow_p);
15227 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15229 if (TYPE_UNSIGNED (inner_type))
15230 return true;
15231 return tree_expr_nonnegative_warnv_p (op0,
15232 strict_overflow_p);
15235 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15237 if (TREE_CODE (inner_type) == REAL_TYPE)
15238 return tree_expr_nonnegative_warnv_p (op0,
15239 strict_overflow_p);
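/* A value zero-extended from a strictly narrower unsigned type
   always has its sign bit clear.  */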
15240 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15241 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15242 && TYPE_UNSIGNED (inner_type);
15245 break;
15247 default:
15248 return tree_simple_nonnegative_warnv_p (code, type);
15251 /* We don't know the sign of `t', so be conservative and return false. */
15252 return false;
15255 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15256 value is based on the assumption that signed overflow is undefined,
15257 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15258 *STRICT_OVERFLOW_P. */
15260 bool
15261 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15262 tree op1, bool *strict_overflow_p)
15264 if (TYPE_UNSIGNED (type))
15265 return true;
15267 switch (code)
15269 case POINTER_PLUS_EXPR:
15270 case PLUS_EXPR:
15271 if (FLOAT_TYPE_P (type))
15272 return (tree_expr_nonnegative_warnv_p (op0,
15273 strict_overflow_p)
15274 && tree_expr_nonnegative_warnv_p (op1,
15275 strict_overflow_p));
15277 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15278 both unsigned and at least 2 bits shorter than the result. */
15279 if (TREE_CODE (type) == INTEGER_TYPE
15280 && TREE_CODE (op0) == NOP_EXPR
15281 && TREE_CODE (op1) == NOP_EXPR)
15283 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15284 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15285 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15286 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15288 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15289 TYPE_PRECISION (inner2)) + 1;
15290 return prec < TYPE_PRECISION (type);
15293 break;
15295 case MULT_EXPR:
15296 if (FLOAT_TYPE_P (type))
15298 /* x * x for floating point x is always non-negative. */
15299 if (operand_equal_p (op0, op1, 0))
15300 return true;
15301 return (tree_expr_nonnegative_warnv_p (op0,
15302 strict_overflow_p)
15303 && tree_expr_nonnegative_warnv_p (op1,
15304 strict_overflow_p));
15307 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15308 both unsigned and their combined precision is less than that of the result. */
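/* For example, (unsigned char) x * (unsigned char) y needs at most
   8 + 8 = 16 bits, so in a 32-bit signed int the product cannot be
   negative.  */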
15309 if (TREE_CODE (type) == INTEGER_TYPE
15310 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15311 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15313 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15314 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15315 : TREE_TYPE (op0);
15316 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15317 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15318 : TREE_TYPE (op1);
15320 bool unsigned0 = TYPE_UNSIGNED (inner0);
15321 bool unsigned1 = TYPE_UNSIGNED (inner1);
15323 if (TREE_CODE (op0) == INTEGER_CST)
15324 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15326 if (TREE_CODE (op1) == INTEGER_CST)
15327 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15329 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15330 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15332 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15333 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15334 : TYPE_PRECISION (inner0);
15336 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15337 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15338 : TYPE_PRECISION (inner1);
15340 return precision0 + precision1 < TYPE_PRECISION (type);
15343 return false;
15345 case BIT_AND_EXPR:
15346 case MAX_EXPR:
15347 return (tree_expr_nonnegative_warnv_p (op0,
15348 strict_overflow_p)
15349 || tree_expr_nonnegative_warnv_p (op1,
15350 strict_overflow_p));
15352 case BIT_IOR_EXPR:
15353 case BIT_XOR_EXPR:
15354 case MIN_EXPR:
15355 case RDIV_EXPR:
15356 case TRUNC_DIV_EXPR:
15357 case CEIL_DIV_EXPR:
15358 case FLOOR_DIV_EXPR:
15359 case ROUND_DIV_EXPR:
15360 return (tree_expr_nonnegative_warnv_p (op0,
15361 strict_overflow_p)
15362 && tree_expr_nonnegative_warnv_p (op1,
15363 strict_overflow_p));
15365 case TRUNC_MOD_EXPR:
15366 case CEIL_MOD_EXPR:
15367 case FLOOR_MOD_EXPR:
15368 case ROUND_MOD_EXPR:
15369 return tree_expr_nonnegative_warnv_p (op0,
15370 strict_overflow_p);
15371 default:
15372 return tree_simple_nonnegative_warnv_p (code, type);
15375 /* We don't know the sign of `t', so be conservative and return false. */
15376 return false;
15379 /* Return true if T is known to be non-negative. If the return
15380 value is based on the assumption that signed overflow is undefined,
15381 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15382 *STRICT_OVERFLOW_P. */
15384 bool
15385 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15387 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15388 return true;
15390 switch (TREE_CODE (t))
15392 case INTEGER_CST:
15393 return tree_int_cst_sgn (t) >= 0;
15395 case REAL_CST:
15396 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15398 case FIXED_CST:
15399 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15401 case COND_EXPR:
15402 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15403 strict_overflow_p)
15404 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15405 strict_overflow_p));
15406 default:
15407 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15408 TREE_TYPE (t));
15410 /* We don't know the sign of `t', so be conservative and return false. */
15411 return false;
15414 /* Return true if T is known to be non-negative. If the return
15415 value is based on the assumption that signed overflow is undefined,
15416 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15417 *STRICT_OVERFLOW_P. */
15419 bool
15420 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15421 tree arg0, tree arg1, bool *strict_overflow_p)
15423 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15424 switch (DECL_FUNCTION_CODE (fndecl))
15426 CASE_FLT_FN (BUILT_IN_ACOS):
15427 CASE_FLT_FN (BUILT_IN_ACOSH):
15428 CASE_FLT_FN (BUILT_IN_CABS):
15429 CASE_FLT_FN (BUILT_IN_COSH):
15430 CASE_FLT_FN (BUILT_IN_ERFC):
15431 CASE_FLT_FN (BUILT_IN_EXP):
15432 CASE_FLT_FN (BUILT_IN_EXP10):
15433 CASE_FLT_FN (BUILT_IN_EXP2):
15434 CASE_FLT_FN (BUILT_IN_FABS):
15435 CASE_FLT_FN (BUILT_IN_FDIM):
15436 CASE_FLT_FN (BUILT_IN_HYPOT):
15437 CASE_FLT_FN (BUILT_IN_POW10):
15438 CASE_INT_FN (BUILT_IN_FFS):
15439 CASE_INT_FN (BUILT_IN_PARITY):
15440 CASE_INT_FN (BUILT_IN_POPCOUNT):
15441 case BUILT_IN_BSWAP32:
15442 case BUILT_IN_BSWAP64:
15443 /* Always true. */
15444 return true;
15446 CASE_FLT_FN (BUILT_IN_SQRT):
15447 /* sqrt(-0.0) is -0.0. */
15448 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15449 return true;
15450 return tree_expr_nonnegative_warnv_p (arg0,
15451 strict_overflow_p);
15453 CASE_FLT_FN (BUILT_IN_ASINH):
15454 CASE_FLT_FN (BUILT_IN_ATAN):
15455 CASE_FLT_FN (BUILT_IN_ATANH):
15456 CASE_FLT_FN (BUILT_IN_CBRT):
15457 CASE_FLT_FN (BUILT_IN_CEIL):
15458 CASE_FLT_FN (BUILT_IN_ERF):
15459 CASE_FLT_FN (BUILT_IN_EXPM1):
15460 CASE_FLT_FN (BUILT_IN_FLOOR):
15461 CASE_FLT_FN (BUILT_IN_FMOD):
15462 CASE_FLT_FN (BUILT_IN_FREXP):
15463 CASE_FLT_FN (BUILT_IN_ICEIL):
15464 CASE_FLT_FN (BUILT_IN_IFLOOR):
15465 CASE_FLT_FN (BUILT_IN_IRINT):
15466 CASE_FLT_FN (BUILT_IN_IROUND):
15467 CASE_FLT_FN (BUILT_IN_LCEIL):
15468 CASE_FLT_FN (BUILT_IN_LDEXP):
15469 CASE_FLT_FN (BUILT_IN_LFLOOR):
15470 CASE_FLT_FN (BUILT_IN_LLCEIL):
15471 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15472 CASE_FLT_FN (BUILT_IN_LLRINT):
15473 CASE_FLT_FN (BUILT_IN_LLROUND):
15474 CASE_FLT_FN (BUILT_IN_LRINT):
15475 CASE_FLT_FN (BUILT_IN_LROUND):
15476 CASE_FLT_FN (BUILT_IN_MODF):
15477 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15478 CASE_FLT_FN (BUILT_IN_RINT):
15479 CASE_FLT_FN (BUILT_IN_ROUND):
15480 CASE_FLT_FN (BUILT_IN_SCALB):
15481 CASE_FLT_FN (BUILT_IN_SCALBLN):
15482 CASE_FLT_FN (BUILT_IN_SCALBN):
15483 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15484 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15485 CASE_FLT_FN (BUILT_IN_SINH):
15486 CASE_FLT_FN (BUILT_IN_TANH):
15487 CASE_FLT_FN (BUILT_IN_TRUNC):
15488 /* True if the 1st argument is nonnegative. */
15489 return tree_expr_nonnegative_warnv_p (arg0,
15490 strict_overflow_p);
15492 CASE_FLT_FN (BUILT_IN_FMAX):
15493 /* True if the 1st OR the 2nd argument is nonnegative. */
15494 return (tree_expr_nonnegative_warnv_p (arg0,
15495 strict_overflow_p)
15496 || (tree_expr_nonnegative_warnv_p (arg1,
15497 strict_overflow_p)));
15499 CASE_FLT_FN (BUILT_IN_FMIN):
15500 /* True if the 1st AND 2nd arguments are nonnegative. */
15501 return (tree_expr_nonnegative_warnv_p (arg0,
15502 strict_overflow_p)
15503 && (tree_expr_nonnegative_warnv_p (arg1,
15504 strict_overflow_p)));
15506 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15507 /* True if the 2nd argument is nonnegative. */
15508 return tree_expr_nonnegative_warnv_p (arg1,
15509 strict_overflow_p);
15511 CASE_FLT_FN (BUILT_IN_POWI):
15512 /* True if the 1st argument is nonnegative or the second
15513 argument is an even integer. */
15514 if (TREE_CODE (arg1) == INTEGER_CST
15515 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15516 return true;
15517 return tree_expr_nonnegative_warnv_p (arg0,
15518 strict_overflow_p);
15520 CASE_FLT_FN (BUILT_IN_POW):
15521 /* True if the 1st argument is nonnegative or the second
15522 argument is an even integer valued real. */
15523 if (TREE_CODE (arg1) == REAL_CST)
15525 REAL_VALUE_TYPE c;
15526 HOST_WIDE_INT n;
15528 c = TREE_REAL_CST (arg1);
15529 n = real_to_integer (&c);
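/* real_to_integer truncates, so round-trip N through
   real_from_integer and compare with real_identical to verify
   that ARG1 is exactly an even integer.  */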
15530 if ((n & 1) == 0)
15532 REAL_VALUE_TYPE cint;
15533 real_from_integer (&cint, VOIDmode, n,
15534 n < 0 ? -1 : 0, 0);
15535 if (real_identical (&c, &cint))
15536 return true;
15539 return tree_expr_nonnegative_warnv_p (arg0,
15540 strict_overflow_p);
15542 default:
15543 break;
15545 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15546 type);
15549 /* Return true if T is known to be non-negative. If the return
15550 value is based on the assumption that signed overflow is undefined,
15551 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15552 *STRICT_OVERFLOW_P. */
15554 bool
15555 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15557 enum tree_code code = TREE_CODE (t);
15558 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15559 return true;
15561 switch (code)
15563 case TARGET_EXPR:
15565 tree temp = TARGET_EXPR_SLOT (t);
15566 t = TARGET_EXPR_INITIAL (t);
15568 /* If the initializer is non-void, then it's a normal expression
15569 that will be assigned to the slot. */
15570 if (!VOID_TYPE_P (t))
15571 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15573 /* Otherwise, the initializer sets the slot in some way. One common
15574 way is an assignment statement at the end of the initializer. */
15575 while (1)
15577 if (TREE_CODE (t) == BIND_EXPR)
15578 t = expr_last (BIND_EXPR_BODY (t));
15579 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15580 || TREE_CODE (t) == TRY_CATCH_EXPR)
15581 t = expr_last (TREE_OPERAND (t, 0));
15582 else if (TREE_CODE (t) == STATEMENT_LIST)
15583 t = expr_last (t);
15584 else
15585 break;
15587 if (TREE_CODE (t) == MODIFY_EXPR
15588 && TREE_OPERAND (t, 0) == temp)
15589 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15590 strict_overflow_p);
15592 return false;
15595 case CALL_EXPR:
15597 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15598 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15600 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15601 get_callee_fndecl (t),
15602 arg0,
15603 arg1,
15604 strict_overflow_p);
15606 case COMPOUND_EXPR:
15607 case MODIFY_EXPR:
15608 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15609 strict_overflow_p);
15610 case BIND_EXPR:
15611 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15612 strict_overflow_p);
15613 case SAVE_EXPR:
15614 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15615 strict_overflow_p);
15617 default:
15618 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15619 TREE_TYPE (t));
15622 /* We don't know the sign of `t', so be conservative and return false. */
15623 return false;
15626 /* Return true if T is known to be non-negative. If the return
15627 value is based on the assumption that signed overflow is undefined,
15628 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15629 *STRICT_OVERFLOW_P. */
15631 bool
15632 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15634 enum tree_code code;
15635 if (t == error_mark_node)
15636 return false;
15638 code = TREE_CODE (t);
15639 switch (TREE_CODE_CLASS (code))
15641 case tcc_binary:
15642 case tcc_comparison:
15643 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15644 TREE_TYPE (t),
15645 TREE_OPERAND (t, 0),
15646 TREE_OPERAND (t, 1),
15647 strict_overflow_p);
15649 case tcc_unary:
15650 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15651 TREE_TYPE (t),
15652 TREE_OPERAND (t, 0),
15653 strict_overflow_p);
15655 case tcc_constant:
15656 case tcc_declaration:
15657 case tcc_reference:
15658 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15660 default:
15661 break;
15664 switch (code)
15666 case TRUTH_AND_EXPR:
15667 case TRUTH_OR_EXPR:
15668 case TRUTH_XOR_EXPR:
15669 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15670 TREE_TYPE (t),
15671 TREE_OPERAND (t, 0),
15672 TREE_OPERAND (t, 1),
15673 strict_overflow_p);
15674 case TRUTH_NOT_EXPR:
15675 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15676 TREE_TYPE (t),
15677 TREE_OPERAND (t, 0),
15678 strict_overflow_p);
15680 case COND_EXPR:
15681 case CONSTRUCTOR:
15682 case OBJ_TYPE_REF:
15683 case ASSERT_EXPR:
15684 case ADDR_EXPR:
15685 case WITH_SIZE_EXPR:
15686 case SSA_NAME:
15687 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15689 default:
15690 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15694 /* Return true if `t' is known to be non-negative. Handle warnings
15695 about undefined signed overflow. */
15697 bool
15698 tree_expr_nonnegative_p (tree t)
15700 bool ret, strict_overflow_p;
15702 strict_overflow_p = false;
15703 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15704 if (strict_overflow_p)
15705 fold_overflow_warning (("assuming signed overflow does not occur when "
15706 "determining that expression is always "
15707 "non-negative"),
15708 WARN_STRICT_OVERFLOW_MISC);
15709 return ret;
15713 /* Return true when the unary expression (CODE OP0), whose result has
15714 type TYPE, is known to be nonzero. Similar logic for addresses is
15715 present in nonzero_address_p in rtlanal.c.
15717 If the return value is based on the assumption that signed overflow
15718 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15719 change *STRICT_OVERFLOW_P. */
15721 bool
15722 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15723 bool *strict_overflow_p)
15725 switch (code)
15727 case ABS_EXPR:
15728 return tree_expr_nonzero_warnv_p (op0,
15729 strict_overflow_p);
15731 case NOP_EXPR:
15733 tree inner_type = TREE_TYPE (op0);
15734 tree outer_type = type;
15736 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15737 && tree_expr_nonzero_warnv_p (op0,
15738 strict_overflow_p));
15740 break;
15742 case NON_LVALUE_EXPR:
15743 return tree_expr_nonzero_warnv_p (op0,
15744 strict_overflow_p);
15746 default:
15747 break;
15750 return false;
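/* Worked example for the NOP_EXPR precision check above: widening a
   nonzero value keeps it nonzero, but narrowing need not -- with an
   8-bit char, (char) 256 truncates to 0 -- so only conversions to a
   type at least as wide as the source are accepted.  */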
15753 /* Return true when the binary expression (CODE OP0 OP1), whose result
15754 has type TYPE, is known to be nonzero. Similar logic for addresses
15755 is present in nonzero_address_p in rtlanal.c.
15757 If the return value is based on the assumption that signed overflow
15758 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15759 change *STRICT_OVERFLOW_P. */
15761 bool
15762 tree_binary_nonzero_warnv_p (enum tree_code code,
15763 tree type,
15764 tree op0,
15765 tree op1, bool *strict_overflow_p)
15767 bool sub_strict_overflow_p;
15768 switch (code)
15770 case POINTER_PLUS_EXPR:
15771 case PLUS_EXPR:
15772 if (TYPE_OVERFLOW_UNDEFINED (type))
15774 /* In the presence of negative values it is hard
15775 to say anything definite. */
15776 sub_strict_overflow_p = false;
15777 if (!tree_expr_nonnegative_warnv_p (op0,
15778 &sub_strict_overflow_p)
15779 || !tree_expr_nonnegative_warnv_p (op1,
15780 &sub_strict_overflow_p))
15781 return false;
15782 /* One of the operands must be positive and the other non-negative. */
15783 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15784 overflows, on a two's-complement machine the sum of two
15785 nonnegative numbers can never be zero. */
15786 return (tree_expr_nonzero_warnv_p (op0,
15787 strict_overflow_p)
15788 || tree_expr_nonzero_warnv_p (op1,
15789 strict_overflow_p));
15791 break;
15793 case MULT_EXPR:
15794 if (TYPE_OVERFLOW_UNDEFINED (type))
15796 if (tree_expr_nonzero_warnv_p (op0,
15797 strict_overflow_p)
15798 && tree_expr_nonzero_warnv_p (op1,
15799 strict_overflow_p))
15801 *strict_overflow_p = true;
15802 return true;
15805 break;
15807 case MIN_EXPR:
15808 sub_strict_overflow_p = false;
15809 if (tree_expr_nonzero_warnv_p (op0,
15810 &sub_strict_overflow_p)
15811 && tree_expr_nonzero_warnv_p (op1,
15812 &sub_strict_overflow_p))
15814 if (sub_strict_overflow_p)
15815 *strict_overflow_p = true;
15817 break;
15819 case MAX_EXPR:
15820 sub_strict_overflow_p = false;
15821 if (tree_expr_nonzero_warnv_p (op0,
15822 &sub_strict_overflow_p))
15824 if (sub_strict_overflow_p)
15825 *strict_overflow_p = true;
15827 /* When both operands are nonzero, then MAX must be too. */
15828 if (tree_expr_nonzero_warnv_p (op1,
15829 strict_overflow_p))
15830 return true;
15832 /* MAX where operand 0 is positive is positive. */
15833 return tree_expr_nonnegative_warnv_p (op0,
15834 strict_overflow_p);
15836 /* MAX where operand 1 is positive is positive. */
15837 else if (tree_expr_nonzero_warnv_p (op1,
15838 &sub_strict_overflow_p)
15839 && tree_expr_nonnegative_warnv_p (op1,
15840 &sub_strict_overflow_p))
15842 if (sub_strict_overflow_p)
15843 *strict_overflow_p = true;
15844 return true;
15846 break;
15848 case BIT_IOR_EXPR:
15849 return (tree_expr_nonzero_warnv_p (op1,
15850 strict_overflow_p)
15851 || tree_expr_nonzero_warnv_p (op0,
15852 strict_overflow_p));
15854 default:
15855 break;
15858 return false;
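/* Worked example for the PLUS_EXPR case above: if X and Y are both
   known non-negative and at least one is known nonzero (say
   `(x & 3) + 1', whose addend 1 is a nonzero constant), the sum
   cannot be zero: with 32-bit operands in [0, 2^31), X + Y is at
   most 2^32 - 2, so it cannot wrap back to 0 even modulo 2^32.  */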
15861 /* Return true when T, a constant, declaration, address or COND_EXPR
15862 handled as a single operand, is known to be nonzero. Similar logic
15863 for addresses is present in nonzero_address_p in rtlanal.c.
15865 If the return value is based on the assumption that signed overflow
15866 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15867 change *STRICT_OVERFLOW_P. */
15869 bool
15870 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15872 bool sub_strict_overflow_p;
15873 switch (TREE_CODE (t))
15875 case INTEGER_CST:
15876 return !integer_zerop (t);
15878 case ADDR_EXPR:
15880 tree base = TREE_OPERAND (t, 0);
15881 if (!DECL_P (base))
15882 base = get_base_address (base);
15884 if (!base)
15885 return false;
15887 /* Weak declarations may link to NULL. Other things may also be NULL,
15888 so protect those cases with -fdelete-null-pointer-checks; variables
15889 allocated on the stack, however, are never NULL. */
15890 if (DECL_P (base)
15891 && (flag_delete_null_pointer_checks
15892 || (DECL_CONTEXT (base)
15893 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15894 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15895 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15897 /* Constants are never weak. */
15898 if (CONSTANT_CLASS_P (base))
15899 return true;
15901 return false;
15904 case COND_EXPR:
15905 sub_strict_overflow_p = false;
15906 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15907 &sub_strict_overflow_p)
15908 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15909 &sub_strict_overflow_p))
15911 if (sub_strict_overflow_p)
15912 *strict_overflow_p = true;
15913 return true;
15915 break;
15917 default:
15918 break;
15920 return false;
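/* Example for the ADDR_EXPR case above: `&x' for a stack variable or
   a non-weak global folds to nonzero, but given a hypothetical

     extern int w __attribute__ ((weak));

   `&w' may legitimately be NULL, so it is not treated as nonzero;
   addresses of other globals are likewise trusted only when
   -fdelete-null-pointer-checks is in effect.  */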
15923 /* Return true when T is known to be nonzero. Only integral and
15924 pointer types are handled; doing something useful for floating
15925 point would need more work.
15927 If the return value is based on the assumption that signed overflow
15928 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15929 change *STRICT_OVERFLOW_P. */
15931 bool
15932 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15934 tree type = TREE_TYPE (t);
15935 enum tree_code code;
15937 /* Doing something useful for floating point would need more work. */
15938 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15939 return false;
15941 code = TREE_CODE (t);
15942 switch (TREE_CODE_CLASS (code))
15944 case tcc_unary:
15945 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15946 strict_overflow_p);
15947 case tcc_binary:
15948 case tcc_comparison:
15949 return tree_binary_nonzero_warnv_p (code, type,
15950 TREE_OPERAND (t, 0),
15951 TREE_OPERAND (t, 1),
15952 strict_overflow_p);
15953 case tcc_constant:
15954 case tcc_declaration:
15955 case tcc_reference:
15956 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15958 default:
15959 break;
15962 switch (code)
15964 case TRUTH_NOT_EXPR:
15965 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15966 strict_overflow_p);
15968 case TRUTH_AND_EXPR:
15969 case TRUTH_OR_EXPR:
15970 case TRUTH_XOR_EXPR:
15971 return tree_binary_nonzero_warnv_p (code, type,
15972 TREE_OPERAND (t, 0),
15973 TREE_OPERAND (t, 1),
15974 strict_overflow_p);
15976 case COND_EXPR:
15977 case CONSTRUCTOR:
15978 case OBJ_TYPE_REF:
15979 case ASSERT_EXPR:
15980 case ADDR_EXPR:
15981 case WITH_SIZE_EXPR:
15982 case SSA_NAME:
15983 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15985 case COMPOUND_EXPR:
15986 case MODIFY_EXPR:
15987 case BIND_EXPR:
15988 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15989 strict_overflow_p);
15991 case SAVE_EXPR:
15992 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15993 strict_overflow_p);
15995 case CALL_EXPR:
15996 return alloca_call_p (t);
15998 default:
15999 break;
16001 return false;
16004 /* Return true when T is known to be nonzero. Handle warnings
16005 about undefined signed overflow. */
16007 bool
16008 tree_expr_nonzero_p (tree t)
16010 bool ret, strict_overflow_p;
16012 strict_overflow_p = false;
16013 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
16014 if (strict_overflow_p)
16015 fold_overflow_warning (("assuming signed overflow does not occur when "
16016 "determining that expression is always "
16017 "non-zero"),
16018 WARN_STRICT_OVERFLOW_MISC);
16019 return ret;
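/* An illustrative sketch of this entry point, with X a hypothetical
   pointer-typed tree built elsewhere:

     if (tree_expr_nonzero_p (x))
       ...   -- a comparison `x != 0' could fold to true here

   Like tree_expr_nonnegative_p above, this wraps the _warnv_p worker
   and emits the -Wstrict-overflow note when the proof required it.  */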
16022 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16023 attempt to fold the expression to a constant without modifying TYPE,
16024 OP0 or OP1.
16026 If the expression could be simplified to a constant, then return
16027 the constant. If the expression would not be simplified to a
16028 constant, then return NULL_TREE. */
16030 tree
16031 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16033 tree tem = fold_binary (code, type, op0, op1);
16034 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
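/* An illustrative sketch: folding `2 + 3' in sizetype to the
   constant 5.

     tree t = fold_binary_to_constant (PLUS_EXPR, sizetype,
                                       size_int (2), size_int (3));
     -- t is the INTEGER_CST 5; had an operand not been constant,
     -- t would be NULL_TREE instead.  */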
16037 /* Given the components of a unary expression CODE, TYPE and OP0,
16038 attempt to fold the expression to a constant without modifying
16039 TYPE or OP0.
16041 If the expression could be simplified to a constant, then return
16042 the constant. If the expression would not be simplified to a
16043 constant, then return NULL_TREE. */
16045 tree
16046 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16048 tree tem = fold_unary (code, type, op0);
16049 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
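/* Likewise for the unary variant, a sketch negating a constant:

     tree t = fold_unary_to_constant (NEGATE_EXPR, integer_type_node,
                                      integer_one_node);
     -- t is the INTEGER_CST -1.  */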
16052 /* If EXP represents referencing an element in a constant string
16053 (either via pointer arithmetic or array indexing), return the
16054 tree representing the value accessed, otherwise return NULL. */
16056 tree
16057 fold_read_from_constant_string (tree exp)
16059 if ((TREE_CODE (exp) == INDIRECT_REF
16060 || TREE_CODE (exp) == ARRAY_REF)
16061 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16063 tree exp1 = TREE_OPERAND (exp, 0);
16064 tree index;
16065 tree string;
16066 location_t loc = EXPR_LOCATION (exp);
16068 if (TREE_CODE (exp) == INDIRECT_REF)
16069 string = string_constant (exp1, &index);
16070 else
16072 tree low_bound = array_ref_low_bound (exp);
16073 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16075 /* Optimize the special case of a zero lower bound.
16077 We convert the low_bound to sizetype to avoid some problems
16078 with constant folding. (E.g. suppose the lower bound is 1,
16079 and its mode is QI. Without the conversion, (ARRAY
16080 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16081 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16082 if (! integer_zerop (low_bound))
16083 index = size_diffop_loc (loc, index,
16084 fold_convert_loc (loc, sizetype, low_bound));
16086 string = exp1;
16089 if (string
16090 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16091 && TREE_CODE (string) == STRING_CST
16092 && TREE_CODE (index) == INTEGER_CST
16093 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16094 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16095 == MODE_INT)
16096 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16097 return build_int_cst_type (TREE_TYPE (exp),
16098 (TREE_STRING_POINTER (string)
16099 [TREE_INT_CST_LOW (index)]));
16101 return NULL;
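/* Illustrative example: for a GENERIC tree representing "abc"[1], the
   code above finds the STRING_CST "abc" and index 1 and returns the
   character constant 'b' (98). An index at or beyond
   TREE_STRING_LENGTH, or a string whose element mode is not a single
   byte, yields NULL instead.  */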
16104 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16105 an integer, real, or fixed-point constant.
16107 TYPE is the type of the result. */
16109 static tree
16110 fold_negate_const (tree arg0, tree type)
16112 tree t = NULL_TREE;
16114 switch (TREE_CODE (arg0))
16116 case INTEGER_CST:
16118 double_int val = tree_to_double_int (arg0);
16119 bool overflow;
16120 val = val.neg_with_overflow (&overflow);
16121 t = force_fit_type_double (type, val, 1,
16122 (overflow | TREE_OVERFLOW (arg0))
16123 && !TYPE_UNSIGNED (type));
16124 break;
16127 case REAL_CST:
16128 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16129 break;
16131 case FIXED_CST:
16133 FIXED_VALUE_TYPE f;
16134 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16135 &(TREE_FIXED_CST (arg0)), NULL,
16136 TYPE_SATURATING (type));
16137 t = build_fixed (type, f);
16138 /* Propagate overflow flags. */
16139 if (overflow_p | TREE_OVERFLOW (arg0))
16140 TREE_OVERFLOW (t) = 1;
16141 break;
16144 default:
16145 gcc_unreachable ();
16148 return t;
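/* Worked example: negating INT_MIN in a signed 32-bit type. The
   double_int negation wraps back to INT_MIN and reports overflow, so
   force_fit_type_double returns a fresh INTEGER_CST with the same
   value but with TREE_OVERFLOW set.  */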
16151 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16152 an integer or a real constant.
16154 TYPE is the type of the result. */
16156 tree
16157 fold_abs_const (tree arg0, tree type)
16159 tree t = NULL_TREE;
16161 switch (TREE_CODE (arg0))
16163 case INTEGER_CST:
16165 double_int val = tree_to_double_int (arg0);
16167 /* If the value is unsigned or non-negative, then the absolute value
16168 is the same as the ordinary value. */
16169 if (TYPE_UNSIGNED (type)
16170 || !val.is_negative ())
16171 t = arg0;
16173 /* If the value is negative, then the absolute value is
16174 its negation. */
16175 else
16177 bool overflow;
16178 val = val.neg_with_overflow (&overflow);
16179 t = force_fit_type_double (type, val, -1,
16180 overflow | TREE_OVERFLOW (arg0));
16183 break;
16185 case REAL_CST:
16186 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16187 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16188 else
16189 t = arg0;
16190 break;
16192 default:
16193 gcc_unreachable ();
16196 return t;
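/* Similarly, fold_abs_const on INT_MIN cannot represent the true
   absolute value: the negation overflows back to INT_MIN and the
   result carries TREE_OVERFLOW. For a REAL_CST no overflow is
   possible; a negative value is simply negated.  */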
16199 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16200 constant. TYPE is the type of the result. */
16202 static tree
16203 fold_not_const (const_tree arg0, tree type)
16205 double_int val;
16207 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16209 val = ~tree_to_double_int (arg0);
16210 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16213 /* Given CODE, a relational operator, the target type, TYPE and two
16214 constant operands OP0 and OP1, return the result of the
16215 relational operation. If the result is not a compile time
16216 constant, then return NULL_TREE. */
16218 static tree
16219 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16221 int result, invert;
16223 /* From here on, the only cases we handle are when the result is
16224 known to be a constant. */
16226 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16228 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16229 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16231 /* Handle the cases where either operand is a NaN. */
16232 if (real_isnan (c0) || real_isnan (c1))
16234 switch (code)
16236 case EQ_EXPR:
16237 case ORDERED_EXPR:
16238 result = 0;
16239 break;
16241 case NE_EXPR:
16242 case UNORDERED_EXPR:
16243 case UNLT_EXPR:
16244 case UNLE_EXPR:
16245 case UNGT_EXPR:
16246 case UNGE_EXPR:
16247 case UNEQ_EXPR:
16248 result = 1;
16249 break;
16251 case LT_EXPR:
16252 case LE_EXPR:
16253 case GT_EXPR:
16254 case GE_EXPR:
16255 case LTGT_EXPR:
16256 if (flag_trapping_math)
16257 return NULL_TREE;
16258 result = 0;
16259 break;
16261 default:
16262 gcc_unreachable ();
16265 return constant_boolean_node (result, type);
16268 return constant_boolean_node (real_compare (code, c0, c1), type);
16271 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16273 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16274 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16275 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16278 /* Handle equality/inequality of complex constants. */
16279 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16281 tree rcond = fold_relational_const (code, type,
16282 TREE_REALPART (op0),
16283 TREE_REALPART (op1));
16284 tree icond = fold_relational_const (code, type,
16285 TREE_IMAGPART (op0),
16286 TREE_IMAGPART (op1));
16287 if (code == EQ_EXPR)
16288 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16289 else if (code == NE_EXPR)
16290 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16291 else
16292 return NULL_TREE;
16295 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16297 unsigned count = VECTOR_CST_NELTS (op0);
16298 tree *elts = XALLOCAVEC (tree, count);
16299 gcc_assert (VECTOR_CST_NELTS (op1) == count
16300 && TYPE_VECTOR_SUBPARTS (type) == count);
16302 for (unsigned i = 0; i < count; i++)
16304 tree elem_type = TREE_TYPE (type);
16305 tree elem0 = VECTOR_CST_ELT (op0, i);
16306 tree elem1 = VECTOR_CST_ELT (op1, i);
16308 tree tem = fold_relational_const (code, elem_type,
16309 elem0, elem1);
16311 if (tem == NULL_TREE)
16312 return NULL_TREE;
16314 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16317 return build_vector (type, elts);
16320 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16322 To compute GT, swap the arguments and do LT.
16323 To compute GE, do LT and invert the result.
16324 To compute LE, swap the arguments, do LT and invert the result.
16325 To compute NE, do EQ and invert the result.
16327 Therefore, the code below must handle only EQ and LT. */
16329 if (code == LE_EXPR || code == GT_EXPR)
16331 tree tem = op0;
16332 op0 = op1;
16333 op1 = tem;
16334 code = swap_tree_comparison (code);
16337 /* Note that it is safe to invert for real values here because we
16338 have already handled the one case where it matters (NaN operands). */
16340 invert = 0;
16341 if (code == NE_EXPR || code == GE_EXPR)
16343 invert = 1;
16344 code = invert_tree_comparison (code, false);
16347 /* Compute a result for LT or EQ if the arguments permit;
16348 otherwise return NULL_TREE. */
16349 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16351 if (code == EQ_EXPR)
16352 result = tree_int_cst_equal (op0, op1);
16353 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16354 result = INT_CST_LT_UNSIGNED (op0, op1);
16355 else
16356 result = INT_CST_LT (op0, op1);
16358 else
16359 return NULL_TREE;
16361 if (invert)
16362 result ^= 1;
16363 return constant_boolean_node (result, type);
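/* Worked example for the NaN handling above: with C0 a NaN,
   `c0 == c1' and `c0 < c1' are false while `c0 != c1' is true; but
   `<' on a NaN raises an invalid-operand exception, so under
   -ftrapping-math the LT/LE/GT/GE/LTGT cases are left unfolded
   (NULL_TREE) rather than silently becoming constant 0.  */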
16366 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16367 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16368 itself. */
16370 tree
16371 fold_build_cleanup_point_expr (tree type, tree expr)
16373 /* If the expression does not have side effects then we don't have to wrap
16374 it with a cleanup point expression. */
16375 if (!TREE_SIDE_EFFECTS (expr))
16376 return expr;
16378 /* If the expression is a RETURN_EXPR, check whether its operand (or,
16379 when that operand is a MODIFY_EXPR, the right-hand side of the
16380 assignment) has side effects. If it doesn't, there is no need to
16381 wrap the expression in a cleanup point expression. We don't check
16382 the left-hand side of the MODIFY_EXPR because it is always the return decl. */
16383 if (TREE_CODE (expr) == RETURN_EXPR)
16385 tree op = TREE_OPERAND (expr, 0);
16386 if (!op || !TREE_SIDE_EFFECTS (op))
16387 return expr;
16388 op = TREE_OPERAND (op, 1);
16389 if (!TREE_SIDE_EFFECTS (op))
16390 return expr;
16393 return build1 (CLEANUP_POINT_EXPR, type, expr);
16396 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16397 of an indirection through OP0, or NULL_TREE if no simplification is
16398 possible. */
16400 tree
16401 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16403 tree sub = op0;
16404 tree subtype;
16406 STRIP_NOPS (sub);
16407 subtype = TREE_TYPE (sub);
16408 if (!POINTER_TYPE_P (subtype))
16409 return NULL_TREE;
16411 if (TREE_CODE (sub) == ADDR_EXPR)
16413 tree op = TREE_OPERAND (sub, 0);
16414 tree optype = TREE_TYPE (op);
16415 /* *&CONST_DECL -> to the value of the const decl. */
16416 if (TREE_CODE (op) == CONST_DECL)
16417 return DECL_INITIAL (op);
16418 /* *&p => p; make sure to handle *&"str"[cst] here. */
16419 if (type == optype)
16421 tree fop = fold_read_from_constant_string (op);
16422 if (fop)
16423 return fop;
16424 else
16425 return op;
16427 /* *(foo *)&fooarray => fooarray[0] */
16428 else if (TREE_CODE (optype) == ARRAY_TYPE
16429 && type == TREE_TYPE (optype)
16430 && (!in_gimple_form
16431 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16433 tree type_domain = TYPE_DOMAIN (optype);
16434 tree min_val = size_zero_node;
16435 if (type_domain && TYPE_MIN_VALUE (type_domain))
16436 min_val = TYPE_MIN_VALUE (type_domain);
16437 if (in_gimple_form
16438 && TREE_CODE (min_val) != INTEGER_CST)
16439 return NULL_TREE;
16440 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16441 NULL_TREE, NULL_TREE);
16443 /* *(foo *)&complexfoo => __real__ complexfoo */
16444 else if (TREE_CODE (optype) == COMPLEX_TYPE
16445 && type == TREE_TYPE (optype))
16446 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16447 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16448 else if (TREE_CODE (optype) == VECTOR_TYPE
16449 && type == TREE_TYPE (optype))
16451 tree part_width = TYPE_SIZE (type);
16452 tree index = bitsize_int (0);
16453 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16457 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16458 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16460 tree op00 = TREE_OPERAND (sub, 0);
16461 tree op01 = TREE_OPERAND (sub, 1);
16463 STRIP_NOPS (op00);
16464 if (TREE_CODE (op00) == ADDR_EXPR)
16466 tree op00type;
16467 op00 = TREE_OPERAND (op00, 0);
16468 op00type = TREE_TYPE (op00);
16470 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16471 if (TREE_CODE (op00type) == VECTOR_TYPE
16472 && type == TREE_TYPE (op00type))
16474 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16475 tree part_width = TYPE_SIZE (type);
16476 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16477 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16478 tree index = bitsize_int (indexi);
16480 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16481 return fold_build3_loc (loc,
16482 BIT_FIELD_REF, type, op00,
16483 part_width, index);
16486 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16487 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16488 && type == TREE_TYPE (op00type))
16490 tree size = TYPE_SIZE_UNIT (type);
16491 if (tree_int_cst_equal (size, op01))
16492 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16494 /* ((foo *)&fooarray)[1] => fooarray[1] */
16495 else if (TREE_CODE (op00type) == ARRAY_TYPE
16496 && type == TREE_TYPE (op00type))
16498 tree type_domain = TYPE_DOMAIN (op00type);
16499 tree min_val = size_zero_node;
16500 if (type_domain && TYPE_MIN_VALUE (type_domain))
16501 min_val = TYPE_MIN_VALUE (type_domain);
16502 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16503 TYPE_SIZE_UNIT (type));
16504 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16505 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16506 NULL_TREE, NULL_TREE);
16511 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16512 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16513 && type == TREE_TYPE (TREE_TYPE (subtype))
16514 && (!in_gimple_form
16515 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16517 tree type_domain;
16518 tree min_val = size_zero_node;
16519 sub = build_fold_indirect_ref_loc (loc, sub);
16520 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16521 if (type_domain && TYPE_MIN_VALUE (type_domain))
16522 min_val = TYPE_MIN_VALUE (type_domain);
16523 if (in_gimple_form
16524 && TREE_CODE (min_val) != INTEGER_CST)
16525 return NULL_TREE;
16526 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16527 NULL_TREE);
16530 return NULL_TREE;
16533 /* Builds an expression for an indirection through T, simplifying some
16534 cases. */
16536 tree
16537 build_fold_indirect_ref_loc (location_t loc, tree t)
16539 tree type = TREE_TYPE (TREE_TYPE (t));
16540 tree sub = fold_indirect_ref_1 (loc, type, t);
16542 if (sub)
16543 return sub;
16545 return build1_loc (loc, INDIRECT_REF, type, t);
16548 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16550 tree
16551 fold_indirect_ref_loc (location_t loc, tree t)
16553 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16555 if (sub)
16556 return sub;
16557 else
16558 return t;
16561 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16562 whose result is ignored. The type of the returned tree need not be
16563 the same as the original expression. */
16565 tree
16566 fold_ignored_result (tree t)
16568 if (!TREE_SIDE_EFFECTS (t))
16569 return integer_zero_node;
16571 for (;;)
16572 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16574 case tcc_unary:
16575 t = TREE_OPERAND (t, 0);
16576 break;
16578 case tcc_binary:
16579 case tcc_comparison:
16580 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16581 t = TREE_OPERAND (t, 0);
16582 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16583 t = TREE_OPERAND (t, 1);
16584 else
16585 return t;
16586 break;
16588 case tcc_expression:
16589 switch (TREE_CODE (t))
16591 case COMPOUND_EXPR:
16592 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16593 return t;
16594 t = TREE_OPERAND (t, 0);
16595 break;
16597 case COND_EXPR:
16598 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16599 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16600 return t;
16601 t = TREE_OPERAND (t, 0);
16602 break;
16604 default:
16605 return t;
16607 break;
16609 default:
16610 return t;
16614 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16615 This can only be applied to objects of a sizetype. */
16617 tree
16618 round_up_loc (location_t loc, tree value, int divisor)
16620 tree div = NULL_TREE;
16622 gcc_assert (divisor > 0);
16623 if (divisor == 1)
16624 return value;
16626 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16627 have to do anything. Only do this when VALUE is not a constant,
16628 because for a constant this check is more expensive than simply
16629 performing the rounding. */
16630 if (TREE_CODE (value) != INTEGER_CST)
16632 div = build_int_cst (TREE_TYPE (value), divisor);
16634 if (multiple_of_p (TREE_TYPE (value), value, div))
16635 return value;
16638 /* If divisor is a power of two, simplify this to bit manipulation. */
16639 if (divisor == (divisor & -divisor))
16641 if (TREE_CODE (value) == INTEGER_CST)
16643 double_int val = tree_to_double_int (value);
16644 bool overflow_p;
16646 if ((val.low & (divisor - 1)) == 0)
16647 return value;
16649 overflow_p = TREE_OVERFLOW (value);
16650 val.low &= ~(divisor - 1);
16651 val.low += divisor;
16652 if (val.low == 0)
16654 val.high++;
16655 if (val.high == 0)
16656 overflow_p = true;
16659 return force_fit_type_double (TREE_TYPE (value), val,
16660 -1, overflow_p);
16662 else
16664 tree t;
16666 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16667 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16668 t = build_int_cst (TREE_TYPE (value), -divisor);
16669 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16672 else
16674 if (!div)
16675 div = build_int_cst (TREE_TYPE (value), divisor);
16676 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16677 value = size_binop_loc (loc, MULT_EXPR, value, div);
16680 return value;
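/* Worked example: rounding VALUE == 37 up to DIVISOR == 8 takes the
   power-of-two branch: (37 + 7) & -8 == 40. A non-power-of-two
   divisor such as 12 goes through CEIL_DIV_EXPR and MULT_EXPR
   instead: ceil (37 / 12) * 12 == 48.  */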
16683 /* Likewise, but round down. */
16685 tree
16686 round_down_loc (location_t loc, tree value, int divisor)
16688 tree div = NULL_TREE;
16690 gcc_assert (divisor > 0);
16691 if (divisor == 1)
16692 return value;
16694 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16695 have to do anything. Only do this when VALUE is not a constant,
16696 because for a constant this check is more expensive than simply
16697 performing the rounding. */
16698 if (TREE_CODE (value) != INTEGER_CST)
16700 div = build_int_cst (TREE_TYPE (value), divisor);
16702 if (multiple_of_p (TREE_TYPE (value), value, div))
16703 return value;
16706 /* If divisor is a power of two, simplify this to bit manipulation. */
16707 if (divisor == (divisor & -divisor))
16709 tree t;
16711 t = build_int_cst (TREE_TYPE (value), -divisor);
16712 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16714 else
16716 if (!div)
16717 div = build_int_cst (TREE_TYPE (value), divisor);
16718 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16719 value = size_binop_loc (loc, MULT_EXPR, value, div);
16722 return value;
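/* Worked example: rounding VALUE == 37 down to DIVISOR == 8 masks the
   low bits, 37 & -8 == 32; with DIVISOR == 12 it computes
   (37 / 12) * 12 == 36 via FLOOR_DIV_EXPR and MULT_EXPR.  */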
16725 /* Returns the pointer to the base of the object addressed by EXP and
16726 extracts the information about the offset of the access, storing it
16727 in *PBITPOS (in bits) and *POFFSET (the variable part, or NULL_TREE). */
16729 static tree
16730 split_address_to_core_and_offset (tree exp,
16731 HOST_WIDE_INT *pbitpos, tree *poffset)
16733 tree core;
16734 enum machine_mode mode;
16735 int unsignedp, volatilep;
16736 HOST_WIDE_INT bitsize;
16737 location_t loc = EXPR_LOCATION (exp);
16739 if (TREE_CODE (exp) == ADDR_EXPR)
16741 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16742 poffset, &mode, &unsignedp, &volatilep,
16743 false);
16744 core = build_fold_addr_expr_loc (loc, core);
16746 else
16748 core = exp;
16749 *pbitpos = 0;
16750 *poffset = NULL_TREE;
16753 return core;
16756 /* Returns true if addresses of E1 and E2 differ by a constant, false
16757 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16759 bool
16760 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16762 tree core1, core2;
16763 HOST_WIDE_INT bitpos1, bitpos2;
16764 tree toffset1, toffset2, tdiff, type;
16766 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16767 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16769 if (bitpos1 % BITS_PER_UNIT != 0
16770 || bitpos2 % BITS_PER_UNIT != 0
16771 || !operand_equal_p (core1, core2, 0))
16772 return false;
16774 if (toffset1 && toffset2)
16776 type = TREE_TYPE (toffset1);
16777 if (type != TREE_TYPE (toffset2))
16778 toffset2 = fold_convert (type, toffset2);
16780 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16781 if (!cst_and_fits_in_hwi (tdiff))
16782 return false;
16784 *diff = int_cst_value (tdiff);
16786 else if (toffset1 || toffset2)
16788 /* If only one of the offsets is non-constant, the difference cannot
16789 be a constant. */
16790 return false;
16792 else
16793 *diff = 0;
16795 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16796 return true;
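/* An illustrative sketch, with E1 and E2 hypothetical ADDR_EXPRs for
   `&a[5]' and `&a[2]' over 4-byte elements:

     HOST_WIDE_INT d;
     if (ptr_difference_const (e1, e2, &d))
       ...   -- d == 12

   The cores (`a') compare equal and both bit positions are constant
   and byte-aligned, so the difference is (160 - 64) / 8 == 12.  */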
16799 /* Simplify the floating point expression EXP when the sign of the
16800 result is not significant. Return NULL_TREE if no simplification
16801 is possible. */
16803 tree
16804 fold_strip_sign_ops (tree exp)
16806 tree arg0, arg1;
16807 location_t loc = EXPR_LOCATION (exp);
16809 switch (TREE_CODE (exp))
16811 case ABS_EXPR:
16812 case NEGATE_EXPR:
16813 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16814 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16816 case MULT_EXPR:
16817 case RDIV_EXPR:
16818 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16819 return NULL_TREE;
16820 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16821 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16822 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16823 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16824 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16825 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16826 break;
16828 case COMPOUND_EXPR:
16829 arg0 = TREE_OPERAND (exp, 0);
16830 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16831 if (arg1)
16832 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16833 break;
16835 case COND_EXPR:
16836 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16837 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16838 if (arg0 || arg1)
16839 return fold_build3_loc (loc,
16840 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16841 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16842 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16843 break;
16845 case CALL_EXPR:
16847 const enum built_in_function fcode = builtin_mathfn_code (exp);
16848 switch (fcode)
16850 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16851 /* Strip copysign function call, return the 1st argument. */
16852 arg0 = CALL_EXPR_ARG (exp, 0);
16853 arg1 = CALL_EXPR_ARG (exp, 1);
16854 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16856 default:
16857 /* Strip sign ops from the argument of "odd" math functions. */
16858 if (negate_mathfn_p (fcode))
16860 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16861 if (arg0)
16862 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16864 break;
16867 break;
16869 default:
16870 break;
16872 return NULL_TREE;
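/* Examples: when only the magnitude of the result matters,
   `-x * y' strips to `x * y', and `copysign (x, y)' strips to `x'
   (Y is kept only for its side effects via omit_one_operand_loc).
   For an "odd" function such as sin, `sin (-x)' strips to `sin (x)',
   since the caller has declared the result's sign irrelevant.  */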