/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

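/* The encoding is a bitmask: bit 0 is "less than", bit 1 "equal",
   bit 2 "greater than" and bit 3 "unordered".  Hence, for instance,
   COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) and
   COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD), so the
   AND or OR of two comparisons of the same operands is simply the
   bitwise AND or OR of their codes.  */
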
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
					  uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}

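/* For example, dividing the integer constants 12 by 4 with
   EXACT_DIV_EXPR yields the constant 3, while dividing 13 by 4 yields
   NULL_TREE because the remainder is nonzero.  */
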
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

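/* A sketch of the intended calling convention (hypothetical caller):

     fold_defer_overflow_warnings ();
     tem = fold (expr);
     fold_undefer_overflow_warnings (tem != NULL_TREE && used, stmt, 0);

   so that a warning that assumes undefined signed overflow is only
   issued when the folded result is actually used.  */
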
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

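/* The flag_rounding_math guard above matters because the rounding
   functions are only odd when the rounding mode is symmetric: under
   round-toward-positive-infinity, rint (0.5) == 1.0 but
   -rint (-0.5) == 0.0, so -rint (x) cannot be rewritten as rint (-x)
   when the run-time rounding mode is honored.  */
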
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

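/* For example, in a 32-bit signed type the only value this rejects is
   INT_MIN: its bits are exactly the pattern 1 << (prec - 1), the
   single two's complement value whose negation is not representable.  */
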
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

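/* For example, with signed ints A and B, negate_expr_p (A - B) is true
   (the negation can be built as B - A) and negate_expr_p (A + 5) is
   true (it can be built as -5 - A), but negate_expr_p (A / 5) holds
   only when signed overflow is undefined, for the INT_MIN / -1 reason
   given above.  */
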
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

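/* negate_expr is the entry point used by split_tree below: constants
   fold to their negated value, and anything fold_negate_expr cannot
   simplify is wrapped in an explicit NEGATE_EXPR, so a non-null
   argument always yields a usable tree.  */
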
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

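/* For example, splitting X + 3 with CODE == PLUS_EXPR returns X and
   sets *LITP to 3; splitting X - 3 returns X and sets *MINUS_LITP to 3;
   and splitting ~X (the folded form of -X - 1) returns -X and sets
   *MINUS_LITP to 1.  */
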
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

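/* For example, with CODE == PLUS_EXPR, T1 == A + B and T2 == -X, the
   result is built directly as (A + B) - X; folding the PLUS_EXPR here
   instead could recurse indefinitely, as the comment above explains.  */
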
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
		   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      /* ??? Need quad precision, or an additional shift operand
	 to the multiply primitive, to handle very large highparts.  */
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
	return NULL_TREE;
      tmp = op1 - op2;
      res = tmp.rshift (TYPE_PRECISION (type), TYPE_PRECISION (type), !uns);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    op1.low += op2.low - 1;

	  res.low = op1.low / op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;
      if (op2.is_one ())
	{
	  res = op1;
	  break;
	}
      if (op1 == op2 && !op1.is_zero ())
	{
	  res = double_int_one;
	  break;
	}
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    op1.low += op2.low - 1;
	  res.low = op1.low % op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
			     (!uns && overflow)
			     | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

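/* int_const_binop folds with OVERFLOWABLE == 1, while size_binop_loc
   below passes -1 so that overflow is always recorded for sizetype
   calculations (see the comment there).  The value is forwarded to
   force_fit_type_double, which consults it when deciding whether to
   set TREE_OVERFLOW on the result.  */
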
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ...  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi
	      */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  return NULL_TREE;
}

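/* For example, const_binop (PLUS_EXPR, {1, 2}, {3, 4}) folds the
   vector constants elementwise to {4, 6}; if any element fails to
   fold, the whole vector fold is abandoned.  */
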
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

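/* For example, size_binop (PLUS_EXPR, size_int (4), size_int (8))
   yields the sizetype constant 12, while adding size_zero_node simply
   returns the other operand through the fast paths above.  */
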
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}

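/* For example, applied to the sizetype constants 4 and 8 this computes
   8 - 4 == 4 in the unsigned type, converts it to ssizetype, and
   subtracts it from zero, yielding the ssizetype constant -4.  */
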
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  val = tree_to_double_int (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      val = tree_to_double_int (ut);
	    }
	}
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}

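/* For example, converting the REAL_CST 1.0e30 to a 32-bit signed type
   yields INT_MAX with TREE_OVERFLOW set, and converting a NaN yields 0,
   also with TREE_OVERFLOW set, per the saturation rules above.  */
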
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.  We
     do this by checking whether the fractional bits are nonzero and,
     if so, adding 1 to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
			     (temp.is_negative ()
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

1818 /* Convert expression ARG to type TYPE. Used by the middle-end for
1819 simple conversions in preference to calling the front-end's convert. */
1821 tree
1822 fold_convert_loc (location_t loc, tree type, tree arg)
1824 tree orig = TREE_TYPE (arg);
1825 tree tem;
1827 if (type == orig)
1828 return arg;
1830 if (TREE_CODE (arg) == ERROR_MARK
1831 || TREE_CODE (type) == ERROR_MARK
1832 || TREE_CODE (orig) == ERROR_MARK)
1833 return error_mark_node;
1835 switch (TREE_CODE (type))
1837 case POINTER_TYPE:
1838 case REFERENCE_TYPE:
1839 /* Handle conversions between pointers to different address spaces. */
1840 if (POINTER_TYPE_P (orig)
1841 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1842 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1843 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1844 /* fall through */
1846 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1847 case OFFSET_TYPE:
1848 if (TREE_CODE (arg) == INTEGER_CST)
1850 tem = fold_convert_const (NOP_EXPR, type, arg);
1851 if (tem != NULL_TREE)
1852 return tem;
1854 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1855 || TREE_CODE (orig) == OFFSET_TYPE)
1856 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1857 if (TREE_CODE (orig) == COMPLEX_TYPE)
1858 return fold_convert_loc (loc, type,
1859 fold_build1_loc (loc, REALPART_EXPR,
1860 TREE_TYPE (orig), arg));
1861 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1862 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1863 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1865 case REAL_TYPE:
1866 if (TREE_CODE (arg) == INTEGER_CST)
1868 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1869 if (tem != NULL_TREE)
1870 return tem;
1872 else if (TREE_CODE (arg) == REAL_CST)
1874 tem = fold_convert_const (NOP_EXPR, type, arg);
1875 if (tem != NULL_TREE)
1876 return tem;
1878 else if (TREE_CODE (arg) == FIXED_CST)
1880 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1881 if (tem != NULL_TREE)
1882 return tem;
1885 switch (TREE_CODE (orig))
1887 case INTEGER_TYPE:
1888 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1889 case POINTER_TYPE: case REFERENCE_TYPE:
1890 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1892 case REAL_TYPE:
1893 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1895 case FIXED_POINT_TYPE:
1896 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1898 case COMPLEX_TYPE:
1899 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1900 return fold_convert_loc (loc, type, tem);
1902 default:
1903 gcc_unreachable ();
1906 case FIXED_POINT_TYPE:
1907 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1908 || TREE_CODE (arg) == REAL_CST)
1910 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1911 if (tem != NULL_TREE)
1912 goto fold_convert_exit;
1915 switch (TREE_CODE (orig))
1917 case FIXED_POINT_TYPE:
1918 case INTEGER_TYPE:
1919 case ENUMERAL_TYPE:
1920 case BOOLEAN_TYPE:
1921 case REAL_TYPE:
1922 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1924 case COMPLEX_TYPE:
1925 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1926 return fold_convert_loc (loc, type, tem);
1928 default:
1929 gcc_unreachable ();
1932 case COMPLEX_TYPE:
1933 switch (TREE_CODE (orig))
1935 case INTEGER_TYPE:
1936 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1937 case POINTER_TYPE: case REFERENCE_TYPE:
1938 case REAL_TYPE:
1939 case FIXED_POINT_TYPE:
1940 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1941 fold_convert_loc (loc, TREE_TYPE (type), arg),
1942 fold_convert_loc (loc, TREE_TYPE (type),
1943 integer_zero_node));
1944 case COMPLEX_TYPE:
1946 tree rpart, ipart;
1948 if (TREE_CODE (arg) == COMPLEX_EXPR)
1950 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1951 TREE_OPERAND (arg, 0));
1952 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1953 TREE_OPERAND (arg, 1));
1954 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1957 arg = save_expr (arg);
1958 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1959 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1960 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1961 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1962 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1965 default:
1966 gcc_unreachable ();
1969 case VECTOR_TYPE:
1970 if (integer_zerop (arg))
1971 return build_zero_vector (type);
1972 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1973 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1974 || TREE_CODE (orig) == VECTOR_TYPE);
1975 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
1977 case VOID_TYPE:
1978 tem = fold_ignored_result (arg);
1979 return fold_build1_loc (loc, NOP_EXPR, type, tem);
1981 default:
1982 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1983 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1984 gcc_unreachable ();
1986 fold_convert_exit:
1987 protected_set_expr_location_unshare (tem, loc);
1988 return tem;
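/* For example, converting an INTEGER_CST to a REAL_TYPE goes through
   fold_convert_const and yields a REAL_CST directly, while converting
   a scalar to a COMPLEX_TYPE builds COMPLEX_EXPR <(T) arg, (T) 0>.  */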
1991 /* Return false if expr can be assumed not to be an lvalue, true
1992 otherwise. */
1994 static bool
1995 maybe_lvalue_p (const_tree x)
1997 /* We only need to wrap lvalue tree codes. */
1998 switch (TREE_CODE (x))
2000 case VAR_DECL:
2001 case PARM_DECL:
2002 case RESULT_DECL:
2003 case LABEL_DECL:
2004 case FUNCTION_DECL:
2005 case SSA_NAME:
2007 case COMPONENT_REF:
2008 case MEM_REF:
2009 case INDIRECT_REF:
2010 case ARRAY_REF:
2011 case ARRAY_RANGE_REF:
2012 case BIT_FIELD_REF:
2013 case OBJ_TYPE_REF:
2015 case REALPART_EXPR:
2016 case IMAGPART_EXPR:
2017 case PREINCREMENT_EXPR:
2018 case PREDECREMENT_EXPR:
2019 case SAVE_EXPR:
2020 case TRY_CATCH_EXPR:
2021 case WITH_CLEANUP_EXPR:
2022 case COMPOUND_EXPR:
2023 case MODIFY_EXPR:
2024 case TARGET_EXPR:
2025 case COND_EXPR:
2026 case BIND_EXPR:
2027 break;
2029 default:
2030 /* Assume the worst for front-end tree codes. */
2031 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2032 break;
2033 return false;
2036 return true;
2039 /* Return an expr equal to X but certainly not valid as an lvalue. */
2041 tree
2042 non_lvalue_loc (location_t loc, tree x)
2044 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2045 us. */
2046 if (in_gimple_form)
2047 return x;
2049 if (! maybe_lvalue_p (x))
2050 return x;
2051 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2054 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2055 Zero means allow extended lvalues. */
2057 int pedantic_lvalues;
2059 /* When pedantic, return an expr equal to X but certainly not valid as a
2060 pedantic lvalue. Otherwise, return X. */
2062 static tree
2063 pedantic_non_lvalue_loc (location_t loc, tree x)
2065 if (pedantic_lvalues)
2066 return non_lvalue_loc (loc, x);
2068 return protected_set_expr_location_unshare (x, loc);
2071 /* Given a tree comparison code, return the code that is the logical inverse.
2072 It is generally not safe to do this for floating-point comparisons, except
2073 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2074 ERROR_MARK in this case. */
2076 enum tree_code
2077 invert_tree_comparison (enum tree_code code, bool honor_nans)
2079 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2080 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2081 return ERROR_MARK;
2083 switch (code)
2085 case EQ_EXPR:
2086 return NE_EXPR;
2087 case NE_EXPR:
2088 return EQ_EXPR;
2089 case GT_EXPR:
2090 return honor_nans ? UNLE_EXPR : LE_EXPR;
2091 case GE_EXPR:
2092 return honor_nans ? UNLT_EXPR : LT_EXPR;
2093 case LT_EXPR:
2094 return honor_nans ? UNGE_EXPR : GE_EXPR;
2095 case LE_EXPR:
2096 return honor_nans ? UNGT_EXPR : GT_EXPR;
2097 case LTGT_EXPR:
2098 return UNEQ_EXPR;
2099 case UNEQ_EXPR:
2100 return LTGT_EXPR;
2101 case UNGT_EXPR:
2102 return LE_EXPR;
2103 case UNGE_EXPR:
2104 return LT_EXPR;
2105 case UNLT_EXPR:
2106 return GE_EXPR;
2107 case UNLE_EXPR:
2108 return GT_EXPR;
2109 case ORDERED_EXPR:
2110 return UNORDERED_EXPR;
2111 case UNORDERED_EXPR:
2112 return ORDERED_EXPR;
2113 default:
2114 gcc_unreachable ();
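/* For example, with NaNs honored the inverse of LT_EXPR is UNGE_EXPR
   rather than GE_EXPR: if either operand is a NaN, "a < b" and
   "a >= b" are both false, so only the unordered variant is a true
   inverse.  */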
2118 /* Similar, but return the comparison that results if the operands are
2119 swapped. This is safe for floating-point. */
2121 enum tree_code
2122 swap_tree_comparison (enum tree_code code)
2124 switch (code)
2126 case EQ_EXPR:
2127 case NE_EXPR:
2128 case ORDERED_EXPR:
2129 case UNORDERED_EXPR:
2130 case LTGT_EXPR:
2131 case UNEQ_EXPR:
2132 return code;
2133 case GT_EXPR:
2134 return LT_EXPR;
2135 case GE_EXPR:
2136 return LE_EXPR;
2137 case LT_EXPR:
2138 return GT_EXPR;
2139 case LE_EXPR:
2140 return GE_EXPR;
2141 case UNGT_EXPR:
2142 return UNLT_EXPR;
2143 case UNGE_EXPR:
2144 return UNLE_EXPR;
2145 case UNLT_EXPR:
2146 return UNGT_EXPR;
2147 case UNLE_EXPR:
2148 return UNGE_EXPR;
2149 default:
2150 gcc_unreachable ();
2155 /* Convert a comparison tree code from an enum tree_code representation
2156 into a compcode bit-based encoding. This function is the inverse of
2157 compcode_to_comparison. */
2159 static enum comparison_code
2160 comparison_to_compcode (enum tree_code code)
2162 switch (code)
2164 case LT_EXPR:
2165 return COMPCODE_LT;
2166 case EQ_EXPR:
2167 return COMPCODE_EQ;
2168 case LE_EXPR:
2169 return COMPCODE_LE;
2170 case GT_EXPR:
2171 return COMPCODE_GT;
2172 case NE_EXPR:
2173 return COMPCODE_NE;
2174 case GE_EXPR:
2175 return COMPCODE_GE;
2176 case ORDERED_EXPR:
2177 return COMPCODE_ORD;
2178 case UNORDERED_EXPR:
2179 return COMPCODE_UNORD;
2180 case UNLT_EXPR:
2181 return COMPCODE_UNLT;
2182 case UNEQ_EXPR:
2183 return COMPCODE_UNEQ;
2184 case UNLE_EXPR:
2185 return COMPCODE_UNLE;
2186 case UNGT_EXPR:
2187 return COMPCODE_UNGT;
2188 case LTGT_EXPR:
2189 return COMPCODE_LTGT;
2190 case UNGE_EXPR:
2191 return COMPCODE_UNGE;
2192 default:
2193 gcc_unreachable ();
2197 /* Convert a compcode bit-based encoding of a comparison operator back
2198 to GCC's enum tree_code representation. This function is the
2199 inverse of comparison_to_compcode. */
2201 static enum tree_code
2202 compcode_to_comparison (enum comparison_code code)
2204 switch (code)
2206 case COMPCODE_LT:
2207 return LT_EXPR;
2208 case COMPCODE_EQ:
2209 return EQ_EXPR;
2210 case COMPCODE_LE:
2211 return LE_EXPR;
2212 case COMPCODE_GT:
2213 return GT_EXPR;
2214 case COMPCODE_NE:
2215 return NE_EXPR;
2216 case COMPCODE_GE:
2217 return GE_EXPR;
2218 case COMPCODE_ORD:
2219 return ORDERED_EXPR;
2220 case COMPCODE_UNORD:
2221 return UNORDERED_EXPR;
2222 case COMPCODE_UNLT:
2223 return UNLT_EXPR;
2224 case COMPCODE_UNEQ:
2225 return UNEQ_EXPR;
2226 case COMPCODE_UNLE:
2227 return UNLE_EXPR;
2228 case COMPCODE_UNGT:
2229 return UNGT_EXPR;
2230 case COMPCODE_LTGT:
2231 return LTGT_EXPR;
2232 case COMPCODE_UNGE:
2233 return UNGE_EXPR;
2234 default:
2235 gcc_unreachable ();
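/* The encoding makes combining comparisons a bitwise matter: for
   instance, COMPCODE_LT | COMPCODE_EQ is COMPCODE_LE, and
   COMPCODE_LT & COMPCODE_GT is COMPCODE_FALSE, which is how
   combine_comparisons below can turn "a < b || a == b" into
   "a <= b" and "a < b && a > b" into false.  */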
2239 /* Return a tree for the comparison which is the combination of
2240 doing the AND or OR (depending on CODE) of the two operations LCODE
2241 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2242 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2243 if this makes the transformation invalid. */
2245 tree
2246 combine_comparisons (location_t loc,
2247 enum tree_code code, enum tree_code lcode,
2248 enum tree_code rcode, tree truth_type,
2249 tree ll_arg, tree lr_arg)
2251 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2252 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2253 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2254 int compcode;
2256 switch (code)
2258 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2259 compcode = lcompcode & rcompcode;
2260 break;
2262 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2263 compcode = lcompcode | rcompcode;
2264 break;
2266 default:
2267 return NULL_TREE;
2270 if (!honor_nans)
2272 /* Eliminate unordered comparisons, as well as LTGT and ORD
2273 which are not used unless the mode has NaNs. */
2274 compcode &= ~COMPCODE_UNORD;
2275 if (compcode == COMPCODE_LTGT)
2276 compcode = COMPCODE_NE;
2277 else if (compcode == COMPCODE_ORD)
2278 compcode = COMPCODE_TRUE;
2280 else if (flag_trapping_math)
2282 /* Check that the original operation and the optimized ones will trap
2283 under the same condition. */
2284 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2285 && (lcompcode != COMPCODE_EQ)
2286 && (lcompcode != COMPCODE_ORD);
2287 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2288 && (rcompcode != COMPCODE_EQ)
2289 && (rcompcode != COMPCODE_ORD);
2290 bool trap = (compcode & COMPCODE_UNORD) == 0
2291 && (compcode != COMPCODE_EQ)
2292 && (compcode != COMPCODE_ORD);
2294 /* In a short-circuited boolean expression the LHS might be
2295 such that the RHS, if evaluated, will never trap. For
2296 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2297 if neither x nor y is NaN. (This is a mixed blessing: for
2298 example, the expression above will never trap, hence
2299 optimizing it to x < y would be invalid). */
2300 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2301 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2302 rtrap = false;
2304 /* If the comparison was short-circuited, and only the RHS
2305 trapped, we may now generate a spurious trap. */
2306 if (rtrap && !ltrap
2307 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2308 return NULL_TREE;
2310 /* If we changed the conditions that cause a trap, we lose. */
2311 if ((ltrap || rtrap) != trap)
2312 return NULL_TREE;
2315 if (compcode == COMPCODE_TRUE)
2316 return constant_boolean_node (true, truth_type);
2317 else if (compcode == COMPCODE_FALSE)
2318 return constant_boolean_node (false, truth_type);
2319 else
2321 enum tree_code tcode;
2323 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2324 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
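/* For example, for integer operands "x <= y && x >= y" combines to
   "x == y" (COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ).  With NaNs
   honored under -ftrapping-math the same fold is rejected: LE and GE
   trap on unordered operands while EQ does not, so the ltrap/rtrap
   bookkeeping above returns NULL_TREE.  */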
2328 /* Return nonzero if two operands (typically of the same tree node)
2329 are necessarily equal. If either argument has side-effects this
2330 function returns zero. FLAGS modifies behavior as follows:
2332 If OEP_ONLY_CONST is set, only return nonzero for constants.
2333 This function tests whether the operands are indistinguishable;
2334 it does not test whether they are equal using C's == operation.
2335 The distinction is important for IEEE floating point, because
2336 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2337 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2339 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2340 even though it may hold multiple values during a function.
2341 This is because a GCC tree node guarantees that nothing else is
2342 executed between the evaluation of its "operands" (which may often
2343 be evaluated in arbitrary order). Hence if the operands themselves
2344 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2345 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2346 unset means assuming isochronic (or instantaneous) tree equivalence.
2347 Unless comparing arbitrary expression trees, such as from different
2348 statements, this flag can usually be left unset.
2350 If OEP_PURE_SAME is set, then pure functions with identical arguments
2351 are considered the same. It is used when the caller has other ways
2352 to ensure that global memory is unchanged in between. */
2354 int
2355 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2357 /* If either is ERROR_MARK, they aren't equal. */
2358 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2359 || TREE_TYPE (arg0) == error_mark_node
2360 || TREE_TYPE (arg1) == error_mark_node)
2361 return 0;
2363 /* Similarly, if either does not have a type (like a released SSA name),
2364 they aren't equal. */
2365 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2366 return 0;
2368 /* Check equality of integer constants before bailing out due to
2369 precision differences. */
2370 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2371 return tree_int_cst_equal (arg0, arg1);
2373 /* If both types don't have the same signedness, then we can't consider
2374 them equal. We must check this before the STRIP_NOPS calls
2375 because they may change the signedness of the arguments. As pointers
2376 strictly don't have a signedness, require either two pointers or
2377 two non-pointers as well. */
2378 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2379 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2380 return 0;
2382 /* We cannot consider pointers to different address spaces equal. */
2383 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2384 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2385 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2386 return 0;
2388 /* If both types don't have the same precision, then it is not safe
2389 to strip NOPs. */
2390 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2391 return 0;
2393 STRIP_NOPS (arg0);
2394 STRIP_NOPS (arg1);
2396 /* In case both args are comparisons but with different comparison
2397 code, try to swap the comparison operands of one arg to produce
2398 a match and compare that variant. */
2399 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2400 && COMPARISON_CLASS_P (arg0)
2401 && COMPARISON_CLASS_P (arg1))
2403 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2405 if (TREE_CODE (arg0) == swap_code)
2406 return operand_equal_p (TREE_OPERAND (arg0, 0),
2407 TREE_OPERAND (arg1, 1), flags)
2408 && operand_equal_p (TREE_OPERAND (arg0, 1),
2409 TREE_OPERAND (arg1, 0), flags);
2412 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2413 /* This is needed for conversions and for COMPONENT_REF.
2414 Might as well play it safe and always test this. */
2415 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2416 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2417 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2418 return 0;
2420 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2421 We don't care about side effects in that case because the SAVE_EXPR
2422 takes care of that for us. In all other cases, two expressions are
2423 equal if they have no side effects. If we have two identical
2424 expressions with side effects that should be treated the same due
2425 to the only side effects being identical SAVE_EXPR's, that will
2426 be detected in the recursive calls below.
2427 If we are taking an invariant address of two identical objects
2428 they are necessarily equal as well. */
2429 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2430 && (TREE_CODE (arg0) == SAVE_EXPR
2431 || (flags & OEP_CONSTANT_ADDRESS_OF)
2432 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2433 return 1;
2435 /* Next handle constant cases, those for which we can return 1 even
2436 if ONLY_CONST is set. */
2437 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2438 switch (TREE_CODE (arg0))
2440 case INTEGER_CST:
2441 return tree_int_cst_equal (arg0, arg1);
2443 case FIXED_CST:
2444 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2445 TREE_FIXED_CST (arg1));
2447 case REAL_CST:
2448 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2449 TREE_REAL_CST (arg1)))
2450 return 1;
2453 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2455 /* If we do not distinguish between signed and unsigned zero,
2456 consider them equal. */
2457 if (real_zerop (arg0) && real_zerop (arg1))
2458 return 1;
2460 return 0;
2462 case VECTOR_CST:
2464 unsigned i;
2466 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2467 return 0;
2469 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2471 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2472 VECTOR_CST_ELT (arg1, i), flags))
2473 return 0;
2475 return 1;
2478 case COMPLEX_CST:
2479 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2480 flags)
2481 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2482 flags));
2484 case STRING_CST:
2485 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2486 && ! memcmp (TREE_STRING_POINTER (arg0),
2487 TREE_STRING_POINTER (arg1),
2488 TREE_STRING_LENGTH (arg0)));
2490 case ADDR_EXPR:
2491 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2492 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2493 ? OEP_CONSTANT_ADDRESS_OF : 0);
2494 default:
2495 break;
2498 if (flags & OEP_ONLY_CONST)
2499 return 0;
2501 /* Define macros to test an operand from arg0 and arg1 for equality and a
2502 variant that allows null and views null as being different from any
2503 non-null value. In the latter case, if either is null, they both
2504 must be; otherwise, do the normal comparison. */
2505 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2506 TREE_OPERAND (arg1, N), flags)
2508 #define OP_SAME_WITH_NULL(N) \
2509 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2510 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2512 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2514 case tcc_unary:
2515 /* Two conversions are equal only if signedness and modes match. */
2516 switch (TREE_CODE (arg0))
2518 CASE_CONVERT:
2519 case FIX_TRUNC_EXPR:
2520 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2521 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2522 return 0;
2523 break;
2524 default:
2525 break;
2528 return OP_SAME (0);
2531 case tcc_comparison:
2532 case tcc_binary:
2533 if (OP_SAME (0) && OP_SAME (1))
2534 return 1;
2536 /* For commutative ops, allow the other order. */
2537 return (commutative_tree_code (TREE_CODE (arg0))
2538 && operand_equal_p (TREE_OPERAND (arg0, 0),
2539 TREE_OPERAND (arg1, 1), flags)
2540 && operand_equal_p (TREE_OPERAND (arg0, 1),
2541 TREE_OPERAND (arg1, 0), flags));
2543 case tcc_reference:
2544 /* If either of the pointer (or reference) expressions we are
2545 dereferencing contain a side effect, these cannot be equal. */
2546 if (TREE_SIDE_EFFECTS (arg0)
2547 || TREE_SIDE_EFFECTS (arg1))
2548 return 0;
2550 switch (TREE_CODE (arg0))
2552 case INDIRECT_REF:
2553 case REALPART_EXPR:
2554 case IMAGPART_EXPR:
2555 return OP_SAME (0);
2557 case TARGET_MEM_REF:
2558 /* Require equal extra operands and then fall through to MEM_REF
2559 handling of the two common operands. */
2560 if (!OP_SAME_WITH_NULL (2)
2561 || !OP_SAME_WITH_NULL (3)
2562 || !OP_SAME_WITH_NULL (4))
2563 return 0;
2564 /* Fallthru. */
2565 case MEM_REF:
2566 /* Require equal access sizes, and similar pointer types.
2567 We can have incomplete types for array references of
2568 variable-sized arrays from the Fortran frontend
2569 though. */
2570 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2571 || (TYPE_SIZE (TREE_TYPE (arg0))
2572 && TYPE_SIZE (TREE_TYPE (arg1))
2573 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2574 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2575 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2576 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2577 && OP_SAME (0) && OP_SAME (1));
2579 case ARRAY_REF:
2580 case ARRAY_RANGE_REF:
2581 /* Operands 2 and 3 may be null.
2582 First compare the array index by value if it is constant, as we
2583 may have different types but the same value here. */
2584 return (OP_SAME (0)
2585 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2586 TREE_OPERAND (arg1, 1))
2587 || OP_SAME (1))
2588 && OP_SAME_WITH_NULL (2)
2589 && OP_SAME_WITH_NULL (3));
2591 case COMPONENT_REF:
2592 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2593 may be NULL when we're called to compare MEM_EXPRs. */
2594 return OP_SAME_WITH_NULL (0)
2595 && OP_SAME (1)
2596 && OP_SAME_WITH_NULL (2);
2598 case BIT_FIELD_REF:
2599 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2601 default:
2602 return 0;
2605 case tcc_expression:
2606 switch (TREE_CODE (arg0))
2608 case ADDR_EXPR:
2609 case TRUTH_NOT_EXPR:
2610 return OP_SAME (0);
2612 case TRUTH_ANDIF_EXPR:
2613 case TRUTH_ORIF_EXPR:
2614 return OP_SAME (0) && OP_SAME (1);
2616 case FMA_EXPR:
2617 case WIDEN_MULT_PLUS_EXPR:
2618 case WIDEN_MULT_MINUS_EXPR:
2619 if (!OP_SAME (2))
2620 return 0;
2621 /* The multiplication operands are commutative. */
2622 /* FALLTHRU */
2624 case TRUTH_AND_EXPR:
2625 case TRUTH_OR_EXPR:
2626 case TRUTH_XOR_EXPR:
2627 if (OP_SAME (0) && OP_SAME (1))
2628 return 1;
2630 /* Otherwise take into account this is a commutative operation. */
2631 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2632 TREE_OPERAND (arg1, 1), flags)
2633 && operand_equal_p (TREE_OPERAND (arg0, 1),
2634 TREE_OPERAND (arg1, 0), flags));
2636 case COND_EXPR:
2637 case VEC_COND_EXPR:
2638 case DOT_PROD_EXPR:
2639 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2641 default:
2642 return 0;
2645 case tcc_vl_exp:
2646 switch (TREE_CODE (arg0))
2648 case CALL_EXPR:
2649 /* If the CALL_EXPRs call different functions, then they
2650 clearly cannot be equal. */
2651 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2652 flags))
2653 return 0;
2656 unsigned int cef = call_expr_flags (arg0);
2657 if (flags & OEP_PURE_SAME)
2658 cef &= ECF_CONST | ECF_PURE;
2659 else
2660 cef &= ECF_CONST;
2661 if (!cef)
2662 return 0;
2665 /* Now see if all the arguments are the same. */
2667 const_call_expr_arg_iterator iter0, iter1;
2668 const_tree a0, a1;
2669 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2670 a1 = first_const_call_expr_arg (arg1, &iter1);
2671 a0 && a1;
2672 a0 = next_const_call_expr_arg (&iter0),
2673 a1 = next_const_call_expr_arg (&iter1))
2674 if (! operand_equal_p (a0, a1, flags))
2675 return 0;
2677 /* If we get here and both argument lists are exhausted
2678 then the CALL_EXPRs are equal. */
2679 return ! (a0 || a1);
2681 default:
2682 return 0;
2685 case tcc_declaration:
2686 /* Consider __builtin_sqrt equal to sqrt. */
2687 return (TREE_CODE (arg0) == FUNCTION_DECL
2688 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2689 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2690 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2692 default:
2693 return 0;
2696 #undef OP_SAME
2697 #undef OP_SAME_WITH_NULL
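/* Some consequences of the rules above: "a + b" and "b + a" compare
   equal via the commutativity check, two identical calls to a pure
   (but not const) function compare equal only under OEP_PURE_SAME,
   and expressions with side effects never compare equal unless both
   arguments are one and the same SAVE_EXPR.  */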
2700 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2701 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2703 When in doubt, return 0. */
2705 static int
2706 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2708 int unsignedp1, unsignedpo;
2709 tree primarg0, primarg1, primother;
2710 unsigned int correct_width;
2712 if (operand_equal_p (arg0, arg1, 0))
2713 return 1;
2715 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2716 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2717 return 0;
2719 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2720 and see if the inner values are the same. This removes any
2721 signedness comparison, which doesn't matter here. */
2722 primarg0 = arg0, primarg1 = arg1;
2723 STRIP_NOPS (primarg0);
2724 STRIP_NOPS (primarg1);
2725 if (operand_equal_p (primarg0, primarg1, 0))
2726 return 1;
2728 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2729 actual comparison operand, ARG0.
2731 First throw away any conversions to wider types
2732 already present in the operands. */
2734 primarg1 = get_narrower (arg1, &unsignedp1);
2735 primother = get_narrower (other, &unsignedpo);
2737 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2738 if (unsignedp1 == unsignedpo
2739 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2740 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2742 tree type = TREE_TYPE (arg0);
2744 /* Make sure shorter operand is extended the right way
2745 to match the longer operand. */
2746 primarg1 = fold_convert (signed_or_unsigned_type_for
2747 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2749 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2750 return 1;
2753 return 0;
2756 /* See if ARG is an expression that is either a comparison or is performing
2757 arithmetic on comparisons. The comparisons must only be comparing
2758 two different values, which will be stored in *CVAL1 and *CVAL2; if
2759 they are nonzero it means that some operands have already been found.
2760 No variables may be used anywhere else in the expression except in the
2761 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2762 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2764 If this is true, return 1. Otherwise, return zero. */
2766 static int
2767 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2769 enum tree_code code = TREE_CODE (arg);
2770 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2772 /* We can handle some of the tcc_expression cases here. */
2773 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2774 tclass = tcc_unary;
2775 else if (tclass == tcc_expression
2776 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2777 || code == COMPOUND_EXPR))
2778 tclass = tcc_binary;
2780 else if (tclass == tcc_expression && code == SAVE_EXPR
2781 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2783 /* If we've already found a CVAL1 or CVAL2, this expression is
2784 too complex to handle. */
2785 if (*cval1 || *cval2)
2786 return 0;
2788 tclass = tcc_unary;
2789 *save_p = 1;
2792 switch (tclass)
2794 case tcc_unary:
2795 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2797 case tcc_binary:
2798 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2799 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2800 cval1, cval2, save_p));
2802 case tcc_constant:
2803 return 1;
2805 case tcc_expression:
2806 if (code == COND_EXPR)
2807 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2808 cval1, cval2, save_p)
2809 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2810 cval1, cval2, save_p)
2811 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2812 cval1, cval2, save_p));
2813 return 0;
2815 case tcc_comparison:
2816 /* First see if we can handle the first operand, then the second. For
2817 the second operand, we know *CVAL1 can't be zero. It must be that
2818 one side of the comparison is each of the values; test for the
2819 case where this isn't true by failing if the two operands
2820 are the same. */
2822 if (operand_equal_p (TREE_OPERAND (arg, 0),
2823 TREE_OPERAND (arg, 1), 0))
2824 return 0;
2826 if (*cval1 == 0)
2827 *cval1 = TREE_OPERAND (arg, 0);
2828 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2830 else if (*cval2 == 0)
2831 *cval2 = TREE_OPERAND (arg, 0);
2832 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2834 else
2835 return 0;
2837 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2839 else if (*cval2 == 0)
2840 *cval2 = TREE_OPERAND (arg, 1);
2841 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2843 else
2844 return 0;
2846 return 1;
2848 default:
2849 return 0;
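/* For example, "x == y || x < y" qualifies, setting *CVAL1 to x and
   *CVAL2 to y, whereas "x == y || x == z" does not, since three
   distinct values appear as comparison operands.  */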
2853 /* ARG is a tree that is known to contain just arithmetic operations and
2854 comparisons. Evaluate the operations in the tree substituting NEW0 for
2855 any occurrence of OLD0 as an operand of a comparison and likewise for
2856 NEW1 and OLD1. */
2858 static tree
2859 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2860 tree old1, tree new1)
2862 tree type = TREE_TYPE (arg);
2863 enum tree_code code = TREE_CODE (arg);
2864 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2866 /* We can handle some of the tcc_expression cases here. */
2867 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2868 tclass = tcc_unary;
2869 else if (tclass == tcc_expression
2870 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2871 tclass = tcc_binary;
2873 switch (tclass)
2875 case tcc_unary:
2876 return fold_build1_loc (loc, code, type,
2877 eval_subst (loc, TREE_OPERAND (arg, 0),
2878 old0, new0, old1, new1));
2880 case tcc_binary:
2881 return fold_build2_loc (loc, code, type,
2882 eval_subst (loc, TREE_OPERAND (arg, 0),
2883 old0, new0, old1, new1),
2884 eval_subst (loc, TREE_OPERAND (arg, 1),
2885 old0, new0, old1, new1));
2887 case tcc_expression:
2888 switch (code)
2890 case SAVE_EXPR:
2891 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2892 old1, new1);
2894 case COMPOUND_EXPR:
2895 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2896 old1, new1);
2898 case COND_EXPR:
2899 return fold_build3_loc (loc, code, type,
2900 eval_subst (loc, TREE_OPERAND (arg, 0),
2901 old0, new0, old1, new1),
2902 eval_subst (loc, TREE_OPERAND (arg, 1),
2903 old0, new0, old1, new1),
2904 eval_subst (loc, TREE_OPERAND (arg, 2),
2905 old0, new0, old1, new1));
2906 default:
2907 break;
2909 /* Fall through - ??? */
2911 case tcc_comparison:
2913 tree arg0 = TREE_OPERAND (arg, 0);
2914 tree arg1 = TREE_OPERAND (arg, 1);
2916 /* We need to check both for exact equality and tree equality. The
2917 former will be true if the operand has a side-effect. In that
2918 case, we know the operand occurred exactly once. */
2920 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2921 arg0 = new0;
2922 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2923 arg0 = new1;
2925 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2926 arg1 = new0;
2927 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2928 arg1 = new1;
2930 return fold_build2_loc (loc, code, type, arg0, arg1);
2933 default:
2934 return arg;
2938 /* Return a tree for the case when the result of an expression is RESULT
2939 converted to TYPE and OMITTED was previously an operand of the expression
2940 but is now not needed (e.g., we folded OMITTED * 0).
2942 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2943 the conversion of RESULT to TYPE. */
2945 tree
2946 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2948 tree t = fold_convert_loc (loc, type, result);
2950 /* If the resulting operand is an empty statement, just return the omitted
2951 statement cast to void. */
2952 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2953 return build1_loc (loc, NOP_EXPR, void_type_node,
2954 fold_ignored_result (omitted));
2956 if (TREE_SIDE_EFFECTS (omitted))
2957 return build2_loc (loc, COMPOUND_EXPR, type,
2958 fold_ignored_result (omitted), t);
2960 return non_lvalue_loc (loc, t);
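/* For example, when "f () * 0" is folded to 0 and f has side effects,
   omit_one_operand_loc produces the COMPOUND_EXPR "(f (), 0)" so that
   the call is still evaluated.  */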
2963 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2965 static tree
2966 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2967 tree omitted)
2969 tree t = fold_convert_loc (loc, type, result);
2971 /* If the resulting operand is an empty statement, just return the omitted
2972 statement cast to void. */
2973 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2974 return build1_loc (loc, NOP_EXPR, void_type_node,
2975 fold_ignored_result (omitted));
2977 if (TREE_SIDE_EFFECTS (omitted))
2978 return build2_loc (loc, COMPOUND_EXPR, type,
2979 fold_ignored_result (omitted), t);
2981 return pedantic_non_lvalue_loc (loc, t);
2984 /* Return a tree for the case when the result of an expression is RESULT
2985 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2986 of the expression but are now not needed.
2988 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2989 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2990 evaluated before OMITTED2. Otherwise, if neither has side effects,
2991 just do the conversion of RESULT to TYPE. */
2993 tree
2994 omit_two_operands_loc (location_t loc, tree type, tree result,
2995 tree omitted1, tree omitted2)
2997 tree t = fold_convert_loc (loc, type, result);
2999 if (TREE_SIDE_EFFECTS (omitted2))
3000 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3001 if (TREE_SIDE_EFFECTS (omitted1))
3002 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3004 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3008 /* Return a simplified tree node for the truth-negation of ARG. This
3009 never alters ARG itself. We assume that ARG is an operation that
3010 returns a truth value (0 or 1).
3012 FIXME: one would think we would fold the result, but it causes
3013 problems with the dominator optimizer. */
3015 tree
3016 fold_truth_not_expr (location_t loc, tree arg)
3018 tree type = TREE_TYPE (arg);
3019 enum tree_code code = TREE_CODE (arg);
3020 location_t loc1, loc2;
3022 /* If this is a comparison, we can simply invert it, except for
3023 floating-point non-equality comparisons, in which case we just
3024 enclose a TRUTH_NOT_EXPR around what we have. */
3026 if (TREE_CODE_CLASS (code) == tcc_comparison)
3028 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3029 if (FLOAT_TYPE_P (op_type)
3030 && flag_trapping_math
3031 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3032 && code != NE_EXPR && code != EQ_EXPR)
3033 return NULL_TREE;
3035 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3036 if (code == ERROR_MARK)
3037 return NULL_TREE;
3039 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3040 TREE_OPERAND (arg, 1));
3043 switch (code)
3045 case INTEGER_CST:
3046 return constant_boolean_node (integer_zerop (arg), type);
3048 case TRUTH_AND_EXPR:
3049 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3050 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3051 return build2_loc (loc, TRUTH_OR_EXPR, type,
3052 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3053 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3055 case TRUTH_OR_EXPR:
3056 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3057 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3058 return build2_loc (loc, TRUTH_AND_EXPR, type,
3059 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3060 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3062 case TRUTH_XOR_EXPR:
3063 /* Here we can invert either operand. We invert the first operand
3064 unless the second operand is a TRUTH_NOT_EXPR in which case our
3065 result is the XOR of the first operand with the inside of the
3066 negation of the second operand. */
3068 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3069 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3070 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3071 else
3072 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3073 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3074 TREE_OPERAND (arg, 1));
3076 case TRUTH_ANDIF_EXPR:
3077 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3078 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3079 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3080 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3081 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3083 case TRUTH_ORIF_EXPR:
3084 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3085 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3086 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3087 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3088 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3090 case TRUTH_NOT_EXPR:
3091 return TREE_OPERAND (arg, 0);
3093 case COND_EXPR:
3095 tree arg1 = TREE_OPERAND (arg, 1);
3096 tree arg2 = TREE_OPERAND (arg, 2);
3098 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3099 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3101 /* A COND_EXPR may have a throw as one operand, which
3102 then has void type. Just leave void operands
3103 as they are. */
3104 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3105 VOID_TYPE_P (TREE_TYPE (arg1))
3106 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3107 VOID_TYPE_P (TREE_TYPE (arg2))
3108 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3111 case COMPOUND_EXPR:
3112 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3113 return build2_loc (loc, COMPOUND_EXPR, type,
3114 TREE_OPERAND (arg, 0),
3115 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3117 case NON_LVALUE_EXPR:
3118 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3119 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3121 CASE_CONVERT:
3122 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3123 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3125 /* ... fall through ... */
3127 case FLOAT_EXPR:
3128 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3129 return build1_loc (loc, TREE_CODE (arg), type,
3130 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3132 case BIT_AND_EXPR:
3133 if (!integer_onep (TREE_OPERAND (arg, 1)))
3134 return NULL_TREE;
3135 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3137 case SAVE_EXPR:
3138 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3140 case CLEANUP_POINT_EXPR:
3141 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3142 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3143 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3145 default:
3146 return NULL_TREE;
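/* So, for instance, "!(a && b)" becomes "!a || !b", "!(x < y)"
   becomes "x >= y" when NaNs need not be honored, and negating a
   COND_EXPR pushes the negation into both of its arms.  */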
3150 /* Return a simplified tree node for the truth-negation of ARG. This
3151 never alters ARG itself. We assume that ARG is an operation that
3152 returns a truth value (0 or 1).
3154 FIXME: one would think we would fold the result, but it causes
3155 problems with the dominator optimizer. */
3157 tree
3158 invert_truthvalue_loc (location_t loc, tree arg)
3160 tree tem;
3162 if (TREE_CODE (arg) == ERROR_MARK)
3163 return arg;
3165 tem = fold_truth_not_expr (loc, arg);
3166 if (!tem)
3167 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3169 return tem;
3172 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3173 operands are another bit-wise operation with a common input. If so,
3174 distribute the bit operations to save an operation and possibly two if
3175 constants are involved. For example, convert
3176 (A | B) & (A | C) into A | (B & C)
3177 Further simplification will occur if B and C are constants.
3179 If this optimization cannot be done, 0 will be returned. */
3181 static tree
3182 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3183 tree arg0, tree arg1)
3185 tree common;
3186 tree left, right;
3188 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3189 || TREE_CODE (arg0) == code
3190 || (TREE_CODE (arg0) != BIT_AND_EXPR
3191 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3192 return 0;
3194 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3196 common = TREE_OPERAND (arg0, 0);
3197 left = TREE_OPERAND (arg0, 1);
3198 right = TREE_OPERAND (arg1, 1);
3200 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3202 common = TREE_OPERAND (arg0, 0);
3203 left = TREE_OPERAND (arg0, 1);
3204 right = TREE_OPERAND (arg1, 0);
3206 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3208 common = TREE_OPERAND (arg0, 1);
3209 left = TREE_OPERAND (arg0, 0);
3210 right = TREE_OPERAND (arg1, 1);
3212 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3214 common = TREE_OPERAND (arg0, 1);
3215 left = TREE_OPERAND (arg0, 0);
3216 right = TREE_OPERAND (arg1, 0);
3218 else
3219 return 0;
3221 common = fold_convert_loc (loc, type, common);
3222 left = fold_convert_loc (loc, type, left);
3223 right = fold_convert_loc (loc, type, right);
3224 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3225 fold_build2_loc (loc, code, type, left, right));
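/* With constants this saves two operations, not one: for example,
   (x | 3) & (x | 5) becomes x | (3 & 5), which folds further to
   x | 1.  */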
3228 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3229 with code CODE. This optimization is unsafe. */
3230 static tree
3231 distribute_real_division (location_t loc, enum tree_code code, tree type,
3232 tree arg0, tree arg1)
3234 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3235 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3237 /* (A / C) +- (B / C) -> (A +- B) / C. */
3238 if (mul0 == mul1
3239 && operand_equal_p (TREE_OPERAND (arg0, 1),
3240 TREE_OPERAND (arg1, 1), 0))
3241 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3242 fold_build2_loc (loc, code, type,
3243 TREE_OPERAND (arg0, 0),
3244 TREE_OPERAND (arg1, 0)),
3245 TREE_OPERAND (arg0, 1));
3247 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3248 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3249 TREE_OPERAND (arg1, 0), 0)
3250 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3251 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3253 REAL_VALUE_TYPE r0, r1;
3254 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3255 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3256 if (!mul0)
3257 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3258 if (!mul1)
3259 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3260 real_arithmetic (&r0, code, &r0, &r1);
3261 return fold_build2_loc (loc, MULT_EXPR, type,
3262 TREE_OPERAND (arg0, 0),
3263 build_real (type, r0));
3266 return NULL_TREE;
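/* For example, x/2.0 + x/4.0 becomes x * 0.75 via the second pattern.
   The rewritten expression need not round identically to the
   original, which is why the comment above calls this unsafe.  */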
3269 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3270 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3272 static tree
3273 make_bit_field_ref (location_t loc, tree inner, tree type,
3274 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3276 tree result, bftype;
3278 if (bitpos == 0)
3280 tree size = TYPE_SIZE (TREE_TYPE (inner));
3281 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3282 || POINTER_TYPE_P (TREE_TYPE (inner)))
3283 && host_integerp (size, 0)
3284 && tree_low_cst (size, 0) == bitsize)
3285 return fold_convert_loc (loc, type, inner);
3288 bftype = type;
3289 if (TYPE_PRECISION (bftype) != bitsize
3290 || TYPE_UNSIGNED (bftype) == !unsignedp)
3291 bftype = build_nonstandard_integer_type (bitsize, 0);
3293 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3294 size_int (bitsize), bitsize_int (bitpos));
3296 if (bftype != type)
3297 result = fold_convert_loc (loc, type, result);
3299 return result;
3302 /* Optimize a bit-field compare.
3304 There are two cases: First is a compare against a constant and the
3305 second is a comparison of two items where the fields are at the same
3306 bit position relative to the start of a chunk (byte, halfword, word)
3307 large enough to contain it. In these cases we can avoid the shift
3308 implicit in bitfield extractions.
3310 For constants, we emit a compare of the shifted constant with the
3311 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3312 compared. For two fields at the same position, we do the ANDs with the
3313 similar mask and compare the result of the ANDs.
3315 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3316 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3317 are the left and right operands of the comparison, respectively.
3319 If the optimization described above can be done, we return the resulting
3320 tree. Otherwise we return zero. */
3322 static tree
3323 optimize_bit_field_compare (location_t loc, enum tree_code code,
3324 tree compare_type, tree lhs, tree rhs)
3326 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3327 tree type = TREE_TYPE (lhs);
3328 tree signed_type, unsigned_type;
3329 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3330 enum machine_mode lmode, rmode, nmode;
3331 int lunsignedp, runsignedp;
3332 int lvolatilep = 0, rvolatilep = 0;
3333 tree linner, rinner = NULL_TREE;
3334 tree mask;
3335 tree offset;
3337 /* In the strict volatile bitfields case, doing code changes here may prevent
3338 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3339 if (flag_strict_volatile_bitfields > 0)
3340 return 0;
3342 /* Get all the information about the extractions being done. If the bit size
3343 is the same as the size of the underlying object, we aren't doing an
3344 extraction at all and so can do nothing. We also don't want to
3345 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3346 then will no longer be able to replace it. */
3347 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3348 &lunsignedp, &lvolatilep, false);
3349 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3350 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3351 return 0;
3353 if (!const_p)
3355 /* If this is not a constant, we can only do something if bit positions,
3356 sizes, and signedness are the same. */
3357 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3358 &runsignedp, &rvolatilep, false);
3360 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3361 || lunsignedp != runsignedp || offset != 0
3362 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3363 return 0;
3366 /* See if we can find a mode to refer to this field. We should be able to,
3367 but fail if we can't. */
3368 if (lvolatilep
3369 && GET_MODE_BITSIZE (lmode) > 0
3370 && flag_strict_volatile_bitfields > 0)
3371 nmode = lmode;
3372 else
3373 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3374 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3375 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3376 TYPE_ALIGN (TREE_TYPE (rinner))),
3377 word_mode, lvolatilep || rvolatilep);
3378 if (nmode == VOIDmode)
3379 return 0;
3381 /* Set signed and unsigned types of the precision of this mode for the
3382 shifts below. */
3383 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3384 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3386 /* Compute the bit position and size for the new reference and our offset
3387 within it. If the new reference is the same size as the original, we
3388 won't optimize anything, so return zero. */
3389 nbitsize = GET_MODE_BITSIZE (nmode);
3390 nbitpos = lbitpos & ~ (nbitsize - 1);
3391 lbitpos -= nbitpos;
3392 if (nbitsize == lbitsize)
3393 return 0;
3395 if (BYTES_BIG_ENDIAN)
3396 lbitpos = nbitsize - lbitsize - lbitpos;
3398 /* Make the mask to be used against the extracted field. */
3399 mask = build_int_cst_type (unsigned_type, -1);
3400 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3401 mask = const_binop (RSHIFT_EXPR, mask,
3402 size_int (nbitsize - lbitsize - lbitpos));
3404 if (! const_p)
3405 /* If not comparing with constant, just rework the comparison
3406 and return. */
3407 return fold_build2_loc (loc, code, compare_type,
3408 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3409 make_bit_field_ref (loc, linner,
3410 unsigned_type,
3411 nbitsize, nbitpos,
3412 1),
3413 mask),
3414 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3415 make_bit_field_ref (loc, rinner,
3416 unsigned_type,
3417 nbitsize, nbitpos,
3418 1),
3419 mask));
3421 /* Otherwise, we are handling the constant case. See if the constant is too
3422 big for the field. Warn and return a tree for 0 (false) if so. We do
3423 this not only for its own sake, but to avoid having to test for this
3424 error case below. If we didn't, we might generate wrong code.
3426 For unsigned fields, the constant shifted right by the field length should
3427 be all zero. For signed fields, the high-order bits should agree with
3428 the sign bit. */
3430 if (lunsignedp)
3432 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3433 fold_convert_loc (loc,
3434 unsigned_type, rhs),
3435 size_int (lbitsize))))
3437 warning (0, "comparison is always %d due to width of bit-field",
3438 code == NE_EXPR);
3439 return constant_boolean_node (code == NE_EXPR, compare_type);
3442 else
3444 tree tem = const_binop (RSHIFT_EXPR,
3445 fold_convert_loc (loc, signed_type, rhs),
3446 size_int (lbitsize - 1));
3447 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3449 warning (0, "comparison is always %d due to width of bit-field",
3450 code == NE_EXPR);
3451 return constant_boolean_node (code == NE_EXPR, compare_type);
3455 /* Single-bit compares should always be against zero. */
3456 if (lbitsize == 1 && ! integer_zerop (rhs))
3458 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3459 rhs = build_int_cst (type, 0);
3462 /* Make a new bitfield reference, shift the constant over the
3463 appropriate number of bits and mask it with the computed mask
3464 (in case this was a signed field). If we changed it, make a new one. */
3465 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3466 if (lvolatilep)
3468 TREE_SIDE_EFFECTS (lhs) = 1;
3469 TREE_THIS_VOLATILE (lhs) = 1;
3472 rhs = const_binop (BIT_AND_EXPR,
3473 const_binop (LSHIFT_EXPR,
3474 fold_convert_loc (loc, unsigned_type, rhs),
3475 size_int (lbitpos)),
3476 mask);
3478 lhs = build2_loc (loc, code, compare_type,
3479 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3480 return lhs;
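/* For example, on a little-endian target a comparison like
   "s.f == 2", with f a 3-bit field starting at bit 4 of a word w,
   becomes roughly (w & (7 << 4)) == (2 << 4): one load, one AND and
   a compare, with no shift to extract the field first.  */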
3483 /* Subroutine for fold_truth_andor_1: decode a field reference.
3485 If EXP is a comparison reference, we return the innermost reference.
3487 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3488 set to the starting bit number.
3490 If the innermost field can be completely contained in a mode-sized
3491 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3493 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3494 otherwise it is not changed.
3496 *PUNSIGNEDP is set to the signedness of the field.
3498 *PMASK is set to the mask used. This is either contained in a
3499 BIT_AND_EXPR or derived from the width of the field.
3501 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3503 Return 0 if this is not a component reference or is one that we can't
3504 do anything with. */
3506 static tree
3507 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3508 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3509 int *punsignedp, int *pvolatilep,
3510 tree *pmask, tree *pand_mask)
3512 tree outer_type = 0;
3513 tree and_mask = 0;
3514 tree mask, inner, offset;
3515 tree unsigned_type;
3516 unsigned int precision;
3518 /* All the optimizations using this function assume integer fields.
3519 There are problems with FP fields since the type_for_size call
3520 below can fail for, e.g., XFmode. */
3521 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3522 return 0;
3524 /* We are interested in the bare arrangement of bits, so strip everything
3525 that doesn't affect the machine mode. However, record the type of the
3526 outermost expression if it may matter below. */
3527 if (CONVERT_EXPR_P (exp)
3528 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3529 outer_type = TREE_TYPE (exp);
3530 STRIP_NOPS (exp);
3532 if (TREE_CODE (exp) == BIT_AND_EXPR)
3534 and_mask = TREE_OPERAND (exp, 1);
3535 exp = TREE_OPERAND (exp, 0);
3536 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3537 if (TREE_CODE (and_mask) != INTEGER_CST)
3538 return 0;
3541 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3542 punsignedp, pvolatilep, false);
3543 if ((inner == exp && and_mask == 0)
3544 || *pbitsize < 0 || offset != 0
3545 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3546 return 0;
3548 /* If the number of bits in the reference is the same as the bitsize of
3549 the outer type, then the outer type gives the signedness. Otherwise
3550 (in case of a small bitfield) the signedness is unchanged. */
3551 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3552 *punsignedp = TYPE_UNSIGNED (outer_type);
3554 /* Compute the mask to access the bitfield. */
3555 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3556 precision = TYPE_PRECISION (unsigned_type);
3558 mask = build_int_cst_type (unsigned_type, -1);
3560 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3561 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3563 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3564 if (and_mask != 0)
3565 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3566 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3568 *pmask = mask;
3569 *pand_mask = and_mask;
3570 return inner;
3573 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3574 bit positions. */
3576 static int
3577 all_ones_mask_p (const_tree mask, int size)
3579 tree type = TREE_TYPE (mask);
3580 unsigned int precision = TYPE_PRECISION (type);
3581 tree tmask;
3583 tmask = build_int_cst_type (signed_type_for (type), -1);
3585 return
3586 tree_int_cst_equal (mask,
3587 const_binop (RSHIFT_EXPR,
3588 const_binop (LSHIFT_EXPR, tmask,
3589 size_int (precision - size)),
3590 size_int (precision - size)));
3593 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3594 represents the sign bit of EXP's type. If EXP represents a sign
3595 or zero extension, also test VAL against the unextended type.
3596 The return value is the (sub)expression whose sign bit is VAL,
3597 or NULL_TREE otherwise. */
3599 static tree
3600 sign_bit_p (tree exp, const_tree val)
3602 unsigned HOST_WIDE_INT mask_lo, lo;
3603 HOST_WIDE_INT mask_hi, hi;
3604 int width;
3605 tree t;
3607 /* Tree EXP must have an integral type. */
3608 t = TREE_TYPE (exp);
3609 if (! INTEGRAL_TYPE_P (t))
3610 return NULL_TREE;
3612 /* Tree VAL must be an integer constant. */
3613 if (TREE_CODE (val) != INTEGER_CST
3614 || TREE_OVERFLOW (val))
3615 return NULL_TREE;
3617 width = TYPE_PRECISION (t);
3618 if (width > HOST_BITS_PER_WIDE_INT)
3620 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3621 lo = 0;
3623 mask_hi = ((unsigned HOST_WIDE_INT) -1
3624 >> (HOST_BITS_PER_DOUBLE_INT - width));
3625 mask_lo = -1;
3627 else
3629 hi = 0;
3630 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3632 mask_hi = 0;
3633 mask_lo = ((unsigned HOST_WIDE_INT) -1
3634 >> (HOST_BITS_PER_WIDE_INT - width));
3637 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3638 treat VAL as if it were unsigned. */
3639 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3640 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3641 return exp;
3643 /* Handle extension from a narrower type. */
3644 if (TREE_CODE (exp) == NOP_EXPR
3645 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3646 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3648 return NULL_TREE;
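/* For example, in a 32-bit type sign_bit_p recognizes 0x80000000;
   and for "(int) c == 0x80" with c an 8-bit signed value, the
   extension case applies and the narrower operand c is returned,
   since 0x80 is the sign bit of c's unextended type.  */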
3651 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3652 to be evaluated unconditionally. */
3654 static int
3655 simple_operand_p (const_tree exp)
3657 /* Strip any conversions that don't change the machine mode. */
3658 STRIP_NOPS (exp);
3660 return (CONSTANT_CLASS_P (exp)
3661 || TREE_CODE (exp) == SSA_NAME
3662 || (DECL_P (exp)
3663 && ! TREE_ADDRESSABLE (exp)
3664 && ! TREE_THIS_VOLATILE (exp)
3665 && ! DECL_NONLOCAL (exp)
3666 /* Don't regard global variables as simple. They may be
3667 allocated in ways unknown to the compiler (shared memory,
3668 #pragma weak, etc.). */
3669 && ! TREE_PUBLIC (exp)
3670 && ! DECL_EXTERNAL (exp)
3671 /* Loading a static variable is unduly expensive, but global
3672 registers aren't expensive. */
3673 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3676 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3677 to be evaluated unconditionally.
3678 In addition to simple_operand_p, we assume that comparisons, conversions,
3679 and logic-not operations are simple, if their operands are simple, too. */
3681 static bool
3682 simple_operand_p_2 (tree exp)
3684 enum tree_code code;
3686 if (TREE_SIDE_EFFECTS (exp)
3687 || tree_could_trap_p (exp))
3688 return false;
3690 while (CONVERT_EXPR_P (exp))
3691 exp = TREE_OPERAND (exp, 0);
3693 code = TREE_CODE (exp);
3695 if (TREE_CODE_CLASS (code) == tcc_comparison)
3696 return (simple_operand_p (TREE_OPERAND (exp, 0))
3697 && simple_operand_p (TREE_OPERAND (exp, 1)));
3699 if (code == TRUTH_NOT_EXPR)
3700 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3702 return simple_operand_p (exp);
3706 /* The following functions are subroutines to fold_range_test and allow it to
3707 try to change a logical combination of comparisons into a range test.
3709 For example, both
3710 X == 2 || X == 3 || X == 4 || X == 5
3711 and
3712 X >= 2 && X <= 5
3713 are converted to
3714 (unsigned) (X - 2) <= 3
3716 We describe each set of comparisons as being either inside or outside
3717 a range, using a variable named like IN_P, and then describe the
3718 range with a lower and upper bound. If one of the bounds is omitted,
3719 it represents either the highest or lowest value of the type.
3721 In the comments below, we represent a range by two numbers in brackets
3722 preceded by a "+" to designate being inside that range, or a "-" to
3723 designate being outside that range, so the condition can be inverted by
3724 flipping the prefix. An omitted bound is represented by a "-". For
3725 example, "- [-, 10]" means being outside the range starting at the lowest
3726 possible value and ending at 10, in other words, being greater than 10.
3727 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3728 always false.
3730 We set up things so that the missing bounds are handled in a consistent
3731 manner so neither a missing bound nor "true" and "false" need to be
3732 handled using a special case. */
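/* For instance, "X >= 2 && X <= 5" is the range "+ [2, 5]", its
   negation is "- [2, 5]", and "X <= 5" alone, having no lower bound,
   is "+ [-, 5]".  */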
3734 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3735 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3736 and UPPER1_P are nonzero if the respective argument is an upper bound
3737 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3738 must be specified for a comparison. ARG1 will be converted to ARG0's
3739 type if both are specified. */
3741 static tree
3742 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3743 tree arg1, int upper1_p)
3745 tree tem;
3746 int result;
3747 int sgn0, sgn1;
3749 /* If neither arg represents infinity, do the normal operation.
3750 Else, if not a comparison, return infinity. Else handle the special
3751 comparison rules. Note that most of the cases below won't occur, but
3752 are handled for consistency. */
3754 if (arg0 != 0 && arg1 != 0)
3756 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3757 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3758 STRIP_NOPS (tem);
3759 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3762 if (TREE_CODE_CLASS (code) != tcc_comparison)
3763 return 0;
3765 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3766 for neither. In real maths, we cannot assume open ended ranges are
3767 the same. But, this is computer arithmetic, where numbers are finite.
3768 We can therefore make the transformation of any unbounded range with
3769 the value Z, Z being greater than any representable number. This permits
3770 us to treat unbounded ranges as equal. */
3771 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3772 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3773 switch (code)
3775 case EQ_EXPR:
3776 result = sgn0 == sgn1;
3777 break;
3778 case NE_EXPR:
3779 result = sgn0 != sgn1;
3780 break;
3781 case LT_EXPR:
3782 result = sgn0 < sgn1;
3783 break;
3784 case LE_EXPR:
3785 result = sgn0 <= sgn1;
3786 break;
3787 case GT_EXPR:
3788 result = sgn0 > sgn1;
3789 break;
3790 case GE_EXPR:
3791 result = sgn0 >= sgn1;
3792 break;
3793 default:
3794 gcc_unreachable ();
3797 return constant_boolean_node (result, type);
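/* Example of the unbounded handling above: range_binop (LE_EXPR, type,
   NULL, 1, NULL, 1) compares two missing upper bounds; both get
   SGN == 1, so the result is true, i.e. two unbounded upper bounds
   compare equal, as promised by the comment about Z.  */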
3800 /* Helper routine for make_range. Perform one step for it, return
3801 new expression if the loop should continue or NULL_TREE if it should
3802 stop. */
3804 tree
3805 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3806 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3807 bool *strict_overflow_p)
3809 tree arg0_type = TREE_TYPE (arg0);
3810 tree n_low, n_high, low = *p_low, high = *p_high;
3811 int in_p = *p_in_p, n_in_p;
3813 switch (code)
3815 case TRUTH_NOT_EXPR:
3816 *p_in_p = ! in_p;
3817 return arg0;
3819 case EQ_EXPR: case NE_EXPR:
3820 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3821 /* We can only do something if the range is testing for zero
3822 and if the second operand is an integer constant. Note that
3823 saying something is "in" the range we make is done by
3824 complementing IN_P, since it is set in the initial case of
3825 being not equal to zero; "out" is leaving it alone. */
3826 if (low == NULL_TREE || high == NULL_TREE
3827 || ! integer_zerop (low) || ! integer_zerop (high)
3828 || TREE_CODE (arg1) != INTEGER_CST)
3829 return NULL_TREE;
3831 switch (code)
3833 case NE_EXPR: /* - [c, c] */
3834 low = high = arg1;
3835 break;
3836 case EQ_EXPR: /* + [c, c] */
3837 in_p = ! in_p, low = high = arg1;
3838 break;
3839 case GT_EXPR: /* - [-, c] */
3840 low = 0, high = arg1;
3841 break;
3842 case GE_EXPR: /* + [c, -] */
3843 in_p = ! in_p, low = arg1, high = 0;
3844 break;
3845 case LT_EXPR: /* - [c, -] */
3846 low = arg1, high = 0;
3847 break;
3848 case LE_EXPR: /* + [-, c] */
3849 in_p = ! in_p, low = 0, high = arg1;
3850 break;
3851 default:
3852 gcc_unreachable ();
3855 /* If this is an unsigned comparison, we also know that EXP is
3856 greater than or equal to zero. We base the range tests we make
3857 on that fact, so we record it here so we can parse existing
3858 range tests. We test arg0_type since often the return type
3859 of, e.g. EQ_EXPR, is boolean. */
3860 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3862 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3863 in_p, low, high, 1,
3864 build_int_cst (arg0_type, 0),
3865 NULL_TREE))
3866 return NULL_TREE;
3868 in_p = n_in_p, low = n_low, high = n_high;
3870 /* If the high bound is missing, but we have a nonzero low
3871 bound, reverse the range so it goes from zero to the low bound
3872 minus 1. */
3873 if (high == 0 && low && ! integer_zerop (low))
3875 in_p = ! in_p;
3876 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3877 integer_one_node, 0);
3878 low = build_int_cst (arg0_type, 0);
3882 *p_low = low;
3883 *p_high = high;
3884 *p_in_p = in_p;
3885 return arg0;
3887 case NEGATE_EXPR:
3888 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3889 low and high are non-NULL, then normalize will DTRT. */
3890 if (!TYPE_UNSIGNED (arg0_type)
3891 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3893 if (low == NULL_TREE)
3894 low = TYPE_MIN_VALUE (arg0_type);
3895 if (high == NULL_TREE)
3896 high = TYPE_MAX_VALUE (arg0_type);
3899 /* (-x) IN [a,b] -> x in [-b, -a] */
3900 n_low = range_binop (MINUS_EXPR, exp_type,
3901 build_int_cst (exp_type, 0),
3902 0, high, 1);
3903 n_high = range_binop (MINUS_EXPR, exp_type,
3904 build_int_cst (exp_type, 0),
3905 0, low, 0);
3906 if (n_high != 0 && TREE_OVERFLOW (n_high))
3907 return NULL_TREE;
3908 goto normalize;
3910 case BIT_NOT_EXPR:
3911 /* ~ X -> -X - 1 */
3912 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3913 build_int_cst (exp_type, 1));
3915 case PLUS_EXPR:
3916 case MINUS_EXPR:
3917 if (TREE_CODE (arg1) != INTEGER_CST)
3918 return NULL_TREE;
3920 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3921 move a constant to the other side. */
3922 if (!TYPE_UNSIGNED (arg0_type)
3923 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3924 return NULL_TREE;
3926 /* If EXP is signed, any overflow in the computation is undefined,
3927 so we don't worry about it so long as our computations on
3928 the bounds don't overflow. For unsigned, overflow is defined
3929 and this is exactly the right thing. */
3930 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3931 arg0_type, low, 0, arg1, 0);
3932 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3933 arg0_type, high, 1, arg1, 0);
3934 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3935 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3936 return NULL_TREE;
3938 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3939 *strict_overflow_p = true;
3941 normalize:
3942 /* Check for an unsigned range which has wrapped around the maximum
3943 value thus making n_high < n_low, and normalize it. */
3944 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3946 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3947 integer_one_node, 0);
3948 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3949 integer_one_node, 0);
3951 /* If the range is of the form +/- [ x+1, x ], we won't
3952 be able to normalize it. But then, it represents the
3953 whole range or the empty set, so make it
3954 +/- [ -, - ]. */
3955 if (tree_int_cst_equal (n_low, low)
3956 && tree_int_cst_equal (n_high, high))
3957 low = high = 0;
3958 else
3959 in_p = ! in_p;
3961 else
3962 low = n_low, high = n_high;
3964 *p_low = low;
3965 *p_high = high;
3966 *p_in_p = in_p;
3967 return arg0;
3969 CASE_CONVERT:
3970 case NON_LVALUE_EXPR:
3971 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3972 return NULL_TREE;
3974 if (! INTEGRAL_TYPE_P (arg0_type)
3975 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3976 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3977 return NULL_TREE;
3979 n_low = low, n_high = high;
3981 if (n_low != 0)
3982 n_low = fold_convert_loc (loc, arg0_type, n_low);
3984 if (n_high != 0)
3985 n_high = fold_convert_loc (loc, arg0_type, n_high);
3987 /* If we're converting arg0 from an unsigned type to exp,
3988 a signed type, we will be doing the comparison as unsigned.
3989 The tests above have already verified that LOW and HIGH
3990 are both positive.
3992 So we have to ensure that we will handle large unsigned
3993 values the same way that the current signed bounds treat
3994 negative values. */
3996 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3998 tree high_positive;
3999 tree equiv_type;
4000 /* For fixed-point modes, we need to pass the saturating flag
4001 as the 2nd parameter. */
4002 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4003 equiv_type
4004 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4005 TYPE_SATURATING (arg0_type));
4006 else
4007 equiv_type
4008 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4010 /* A range without an upper bound is, naturally, unbounded.
4011 Since convert would have cropped a very large value, use
4012 the max value for the destination type. */
4013 high_positive
4014 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4015 : TYPE_MAX_VALUE (arg0_type);
4017 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4018 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4019 fold_convert_loc (loc, arg0_type,
4020 high_positive),
4021 build_int_cst (arg0_type, 1));
4023 /* If the low bound is specified, "and" the range with the
4024 range for which the original unsigned value will be
4025 positive. */
4026 if (low != 0)
4028 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4029 1, fold_convert_loc (loc, arg0_type,
4030 integer_zero_node),
4031 high_positive))
4032 return NULL_TREE;
4034 in_p = (n_in_p == in_p);
4036 else
4038 /* Otherwise, "or" the range with the range of the input
4039 that will be interpreted as negative. */
4040 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4041 1, fold_convert_loc (loc, arg0_type,
4042 integer_zero_node),
4043 high_positive))
4044 return NULL_TREE;
4046 in_p = (in_p != n_in_p);
4050 *p_low = n_low;
4051 *p_high = n_high;
4052 *p_in_p = in_p;
4053 return arg0;
4055 default:
4056 return NULL_TREE;
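/* A worked example for the PLUS_EXPR case above: stepping through
   "X + 3" with the range + [-, 10] (and X of a signed type where
   overflow is undefined) subtracts the constant from the bounds and
   returns X with the equivalent range + [-, 7].  */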
4060 /* Given EXP, a logical expression, set the range it is testing into
4061 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4062 actually being tested. *PLOW and *PHIGH will be made of the same
4063 type as the returned expression. If EXP is not a comparison, we
4064 will most likely not be returning a useful value and range. Set
4065 *STRICT_OVERFLOW_P to true if the return value is only valid
4066 because signed overflow is undefined; otherwise, do not change
4067 *STRICT_OVERFLOW_P. */
4069 tree
4070 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4071 bool *strict_overflow_p)
4073 enum tree_code code;
4074 tree arg0, arg1 = NULL_TREE;
4075 tree exp_type, nexp;
4076 int in_p;
4077 tree low, high;
4078 location_t loc = EXPR_LOCATION (exp);
4080 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4081 and see if we can refine the range. Some of the cases below may not
4082 happen, but it doesn't seem worth worrying about this. We "continue"
4083 the outer loop when we've changed something; otherwise we "break"
4084 the switch, which will "break" the while. */
4086 in_p = 0;
4087 low = high = build_int_cst (TREE_TYPE (exp), 0);
4089 while (1)
4091 code = TREE_CODE (exp);
4092 exp_type = TREE_TYPE (exp);
4093 arg0 = NULL_TREE;
4095 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4097 if (TREE_OPERAND_LENGTH (exp) > 0)
4098 arg0 = TREE_OPERAND (exp, 0);
4099 if (TREE_CODE_CLASS (code) == tcc_binary
4100 || TREE_CODE_CLASS (code) == tcc_comparison
4101 || (TREE_CODE_CLASS (code) == tcc_expression
4102 && TREE_OPERAND_LENGTH (exp) > 1))
4103 arg1 = TREE_OPERAND (exp, 1);
4105 if (arg0 == NULL_TREE)
4106 break;
4108 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4109 &high, &in_p, strict_overflow_p);
4110 if (nexp == NULL_TREE)
4111 break;
4112 exp = nexp;
4115 /* If EXP is a constant, we can evaluate whether this is true or false. */
4116 if (TREE_CODE (exp) == INTEGER_CST)
4118 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4119 exp, 0, low, 0))
4120 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4121 exp, 1, high, 1)));
4122 low = high = 0;
4123 exp = 0;
4126 *pin_p = in_p, *plow = low, *phigh = high;
4127 return exp;
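/* Example: for EXP = "X + 3 <= 10", make_range first applies the
   LE_EXPR step, giving the range + [-, 10] on "X + 3", then the
   PLUS_EXPR step, finally returning X with *PIN_P == 1, *PLOW
   unbounded and *PHIGH == 7 (assuming signed X with undefined
   overflow, so the constant may be moved across the addition).  */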
4130 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4131 type, TYPE, return an expression to test if EXP is in (or out of, depending
4132 on IN_P) the range. Return 0 if the test couldn't be created. */
4134 tree
4135 build_range_check (location_t loc, tree type, tree exp, int in_p,
4136 tree low, tree high)
4138 tree etype = TREE_TYPE (exp), value;
4140 #ifdef HAVE_canonicalize_funcptr_for_compare
4141 /* Disable this optimization for function pointer expressions
4142 on targets that require function pointer canonicalization. */
4143 if (HAVE_canonicalize_funcptr_for_compare
4144 && TREE_CODE (etype) == POINTER_TYPE
4145 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4146 return NULL_TREE;
4147 #endif
4149 if (! in_p)
4151 value = build_range_check (loc, type, exp, 1, low, high);
4152 if (value != 0)
4153 return invert_truthvalue_loc (loc, value);
4155 return 0;
4158 if (low == 0 && high == 0)
4159 return build_int_cst (type, 1);
4161 if (low == 0)
4162 return fold_build2_loc (loc, LE_EXPR, type, exp,
4163 fold_convert_loc (loc, etype, high));
4165 if (high == 0)
4166 return fold_build2_loc (loc, GE_EXPR, type, exp,
4167 fold_convert_loc (loc, etype, low));
4169 if (operand_equal_p (low, high, 0))
4170 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4171 fold_convert_loc (loc, etype, low));
4173 if (integer_zerop (low))
4175 if (! TYPE_UNSIGNED (etype))
4177 etype = unsigned_type_for (etype);
4178 high = fold_convert_loc (loc, etype, high);
4179 exp = fold_convert_loc (loc, etype, exp);
4181 return build_range_check (loc, type, exp, 1, 0, high);
4184 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4185 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4187 unsigned HOST_WIDE_INT lo;
4188 HOST_WIDE_INT hi;
4189 int prec;
4191 prec = TYPE_PRECISION (etype);
4192 if (prec <= HOST_BITS_PER_WIDE_INT)
4194 hi = 0;
4195 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4197 else
4199 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4200 lo = (unsigned HOST_WIDE_INT) -1;
4203 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4205 if (TYPE_UNSIGNED (etype))
4207 tree signed_etype = signed_type_for (etype);
4208 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4209 etype
4210 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4211 else
4212 etype = signed_etype;
4213 exp = fold_convert_loc (loc, etype, exp);
4215 return fold_build2_loc (loc, GT_EXPR, type, exp,
4216 build_int_cst (etype, 0));
4220 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4221 This requires wrap-around arithmetic for the type of the expression.
4222 First make sure that arithmetic in this type is valid, then make sure
4223 that it wraps around. */
4224 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4225 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4226 TYPE_UNSIGNED (etype));
4228 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4230 tree utype, minv, maxv;
4232 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4233 for the type in question, as we rely on this here. */
4234 utype = unsigned_type_for (etype);
4235 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4236 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4237 integer_one_node, 1);
4238 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4240 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4241 minv, 1, maxv, 1)))
4242 etype = utype;
4243 else
4244 return 0;
4247 high = fold_convert_loc (loc, etype, high);
4248 low = fold_convert_loc (loc, etype, low);
4249 exp = fold_convert_loc (loc, etype, exp);
4251 value = const_binop (MINUS_EXPR, high, low);
4254 if (POINTER_TYPE_P (etype))
4256 if (value != 0 && !TREE_OVERFLOW (value))
4258 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4259 return build_range_check (loc, type,
4260 fold_build_pointer_plus_loc (loc, exp, low),
4261 1, build_int_cst (etype, 0), value);
4263 return 0;
4266 if (value != 0 && !TREE_OVERFLOW (value))
4267 return build_range_check (loc, type,
4268 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4269 1, build_int_cst (etype, 0), value);
4271 return 0;
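/* Example: with IN_P == 1, LOW == 2 and HIGH == 5 on an unsigned EXP,
   the MINUS_EXPR path above yields (EXP - 2) <= 3, the canonical
   range-test form; with IN_P == 0 the same check is built and then
   inverted by invert_truthvalue_loc.  */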
4274 /* Return the predecessor of VAL in its type, handling the infinite case. */
4276 static tree
4277 range_predecessor (tree val)
4279 tree type = TREE_TYPE (val);
4281 if (INTEGRAL_TYPE_P (type)
4282 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4283 return 0;
4284 else
4285 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4288 /* Return the successor of VAL in its type, handling the infinite case. */
4290 static tree
4291 range_successor (tree val)
4293 tree type = TREE_TYPE (val);
4295 if (INTEGRAL_TYPE_P (type)
4296 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4297 return 0;
4298 else
4299 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
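/* Example: range_successor (INT_MAX) and range_predecessor (INT_MIN)
   both return 0, meaning the neighbouring value does not exist;
   callers treat this as an unbounded end and typically punt.  */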
4302 /* Given two ranges, see if we can merge them into one. Return 1 if we
4303 can, 0 if we can't. Set the output range into the specified parameters. */
4305 bool
4306 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4307 tree high0, int in1_p, tree low1, tree high1)
4309 int no_overlap;
4310 int subset;
4311 int temp;
4312 tree tem;
4313 int in_p;
4314 tree low, high;
4315 int lowequal = ((low0 == 0 && low1 == 0)
4316 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4317 low0, 0, low1, 0)));
4318 int highequal = ((high0 == 0 && high1 == 0)
4319 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4320 high0, 1, high1, 1)));
4322 /* Make range 0 be the range that starts first, or ends last if they
4323 start at the same value. Swap them if it isn't. */
4324 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4325 low0, 0, low1, 0))
4326 || (lowequal
4327 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4328 high1, 1, high0, 1))))
4330 temp = in0_p, in0_p = in1_p, in1_p = temp;
4331 tem = low0, low0 = low1, low1 = tem;
4332 tem = high0, high0 = high1, high1 = tem;
4335 /* Now flag two cases, whether the ranges are disjoint or whether the
4336 second range is totally subsumed in the first. Note that the tests
4337 below are simplified by the ones above. */
4338 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4339 high0, 1, low1, 0));
4340 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4341 high1, 1, high0, 1));
4343 /* We now have four cases, depending on whether we are including or
4344 excluding the two ranges. */
4345 if (in0_p && in1_p)
4347 /* If they don't overlap, the result is false. If the second range
4348 is a subset it is the result. Otherwise, the range is from the start
4349 of the second to the end of the first. */
4350 if (no_overlap)
4351 in_p = 0, low = high = 0;
4352 else if (subset)
4353 in_p = 1, low = low1, high = high1;
4354 else
4355 in_p = 1, low = low1, high = high0;
4358 else if (in0_p && ! in1_p)
4360 /* If they don't overlap, the result is the first range. If they are
4361 equal, the result is false. If the second range is a subset of the
4362 first, and the ranges begin at the same place, we go from just after
4363 the end of the second range to the end of the first. If the second
4364 range is not a subset of the first, or if it is a subset and both
4365 ranges end at the same place, the range starts at the start of the
4366 first range and ends just before the second range.
4367 Otherwise, we can't describe this as a single range. */
4368 if (no_overlap)
4369 in_p = 1, low = low0, high = high0;
4370 else if (lowequal && highequal)
4371 in_p = 0, low = high = 0;
4372 else if (subset && lowequal)
4374 low = range_successor (high1);
4375 high = high0;
4376 in_p = 1;
4377 if (low == 0)
4379 /* We are in the weird situation where high0 > high1 but
4380 high1 has no successor. Punt. */
4381 return 0;
4384 else if (! subset || highequal)
4386 low = low0;
4387 high = range_predecessor (low1);
4388 in_p = 1;
4389 if (high == 0)
4391 /* low0 < low1 but low1 has no predecessor. Punt. */
4392 return 0;
4395 else
4396 return 0;
4399 else if (! in0_p && in1_p)
4401 /* If they don't overlap, the result is the second range. If the second
4402 is a subset of the first, the result is false. Otherwise,
4403 the range starts just after the first range and ends at the
4404 end of the second. */
4405 if (no_overlap)
4406 in_p = 1, low = low1, high = high1;
4407 else if (subset || highequal)
4408 in_p = 0, low = high = 0;
4409 else
4411 low = range_successor (high0);
4412 high = high1;
4413 in_p = 1;
4414 if (low == 0)
4416 /* high1 > high0 but high0 has no successor. Punt. */
4417 return 0;
4422 else
4424 /* The case where we are excluding both ranges. Here the complex case
4425 is if they don't overlap. In that case, the only time we have a
4426 range is if they are adjacent. If the second is a subset of the
4427 first, the result is the first. Otherwise, the range to exclude
4428 starts at the beginning of the first range and ends at the end of the
4429 second. */
4430 if (no_overlap)
4432 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4433 range_successor (high0),
4434 1, low1, 0)))
4435 in_p = 0, low = low0, high = high1;
4436 else
4438 /* Canonicalize - [min, x] into - [-, x]. */
4439 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4440 switch (TREE_CODE (TREE_TYPE (low0)))
4442 case ENUMERAL_TYPE:
4443 if (TYPE_PRECISION (TREE_TYPE (low0))
4444 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4445 break;
4446 /* FALLTHROUGH */
4447 case INTEGER_TYPE:
4448 if (tree_int_cst_equal (low0,
4449 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4450 low0 = 0;
4451 break;
4452 case POINTER_TYPE:
4453 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4454 && integer_zerop (low0))
4455 low0 = 0;
4456 break;
4457 default:
4458 break;
4461 /* Canonicalize - [x, max] into - [x, -]. */
4462 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4463 switch (TREE_CODE (TREE_TYPE (high1)))
4465 case ENUMERAL_TYPE:
4466 if (TYPE_PRECISION (TREE_TYPE (high1))
4467 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4468 break;
4469 /* FALLTHROUGH */
4470 case INTEGER_TYPE:
4471 if (tree_int_cst_equal (high1,
4472 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4473 high1 = 0;
4474 break;
4475 case POINTER_TYPE:
4476 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4477 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4478 high1, 1,
4479 integer_one_node, 1)))
4480 high1 = 0;
4481 break;
4482 default:
4483 break;
4486 /* The ranges might be also adjacent between the maximum and
4487 minimum values of the given type. For
4488 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4489 return + [x + 1, y - 1]. */
4490 if (low0 == 0 && high1 == 0)
4492 low = range_successor (high0);
4493 high = range_predecessor (low1);
4494 if (low == 0 || high == 0)
4495 return 0;
4497 in_p = 1;
4499 else
4500 return 0;
4503 else if (subset)
4504 in_p = 0, low = low0, high = high0;
4505 else
4506 in_p = 0, low = low0, high = high1;
4509 *pin_p = in_p, *plow = low, *phigh = high;
4510 return 1;
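/* Two worked examples: merging + [2, 5] with + [4, 9] (both included,
   as for an AND) takes the overlapping, non-subset branch and yields
   + [4, 5]; merging - [2, 5] with - [6, 9] (both excluded) finds the
   ranges adjacent, since range_successor (5) == 6, and yields
   - [2, 9].  */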
4514 /* Subroutine of fold, looking inside expressions of the form
4515 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4516 of the COND_EXPR. This function is being used also to optimize
4517 A op B ? C : A, by reversing the comparison first.
4519 Return a folded expression whose code is not a COND_EXPR
4520 anymore, or NULL_TREE if no folding opportunity is found. */
4522 static tree
4523 fold_cond_expr_with_comparison (location_t loc, tree type,
4524 tree arg0, tree arg1, tree arg2)
4526 enum tree_code comp_code = TREE_CODE (arg0);
4527 tree arg00 = TREE_OPERAND (arg0, 0);
4528 tree arg01 = TREE_OPERAND (arg0, 1);
4529 tree arg1_type = TREE_TYPE (arg1);
4530 tree tem;
4532 STRIP_NOPS (arg1);
4533 STRIP_NOPS (arg2);
4535 /* If we have A op 0 ? A : -A, consider applying the following
4536 transformations:
4538 A == 0? A : -A same as -A
4539 A != 0? A : -A same as A
4540 A >= 0? A : -A same as abs (A)
4541 A > 0? A : -A same as abs (A)
4542 A <= 0? A : -A same as -abs (A)
4543 A < 0? A : -A same as -abs (A)
4545 None of these transformations work for modes with signed
4546 zeros. If A is +/-0, the first two transformations will
4547 change the sign of the result (from +0 to -0, or vice
4548 versa). The last four will fix the sign of the result,
4549 even though the original expressions could be positive or
4550 negative, depending on the sign of A.
4552 Note that all these transformations are correct if A is
4553 NaN, since the two alternatives (A and -A) are also NaNs. */
4554 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4555 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4556 ? real_zerop (arg01)
4557 : integer_zerop (arg01))
4558 && ((TREE_CODE (arg2) == NEGATE_EXPR
4559 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4560 /* In the case that A is of the form X-Y, '-A' (arg2) may
4561 have already been folded to Y-X, check for that. */
4562 || (TREE_CODE (arg1) == MINUS_EXPR
4563 && TREE_CODE (arg2) == MINUS_EXPR
4564 && operand_equal_p (TREE_OPERAND (arg1, 0),
4565 TREE_OPERAND (arg2, 1), 0)
4566 && operand_equal_p (TREE_OPERAND (arg1, 1),
4567 TREE_OPERAND (arg2, 0), 0))))
4568 switch (comp_code)
4570 case EQ_EXPR:
4571 case UNEQ_EXPR:
4572 tem = fold_convert_loc (loc, arg1_type, arg1);
4573 return pedantic_non_lvalue_loc (loc,
4574 fold_convert_loc (loc, type,
4575 negate_expr (tem)));
4576 case NE_EXPR:
4577 case LTGT_EXPR:
4578 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4579 case UNGE_EXPR:
4580 case UNGT_EXPR:
4581 if (flag_trapping_math)
4582 break;
4583 /* Fall through. */
4584 case GE_EXPR:
4585 case GT_EXPR:
4586 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4587 arg1 = fold_convert_loc (loc, signed_type_for
4588 (TREE_TYPE (arg1)), arg1);
4589 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4590 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4591 case UNLE_EXPR:
4592 case UNLT_EXPR:
4593 if (flag_trapping_math)
4594 break;
4595 case LE_EXPR:
4596 case LT_EXPR:
4597 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4598 arg1 = fold_convert_loc (loc, signed_type_for
4599 (TREE_TYPE (arg1)), arg1);
4600 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4601 return negate_expr (fold_convert_loc (loc, type, tem));
4602 default:
4603 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4604 break;
4607 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4608 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4609 both transformations are correct when A is NaN: A != 0
4610 is then true, and A == 0 is false. */
4612 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4613 && integer_zerop (arg01) && integer_zerop (arg2))
4615 if (comp_code == NE_EXPR)
4616 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4617 else if (comp_code == EQ_EXPR)
4618 return build_int_cst (type, 0);
4621 /* Try some transformations of A op B ? A : B.
4623 A == B? A : B same as B
4624 A != B? A : B same as A
4625 A >= B? A : B same as max (A, B)
4626 A > B? A : B same as max (B, A)
4627 A <= B? A : B same as min (A, B)
4628 A < B? A : B same as min (B, A)
4630 As above, these transformations don't work in the presence
4631 of signed zeros. For example, if A and B are zeros of
4632 opposite sign, the first two transformations will change
4633 the sign of the result. In the last four, the original
4634 expressions give different results for (A=+0, B=-0) and
4635 (A=-0, B=+0), but the transformed expressions do not.
4637 The first two transformations are correct if either A or B
4638 is a NaN. In the first transformation, the condition will
4639 be false, and B will indeed be chosen. In the case of the
4640 second transformation, the condition A != B will be true,
4641 and A will be chosen.
4643 The conversions to max() and min() are not correct if B is
4644 a number and A is not. The conditions in the original
4645 expressions will be false, so all four give B. The min()
4646 and max() versions would give a NaN instead. */
4647 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4648 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4649 /* Avoid these transformations if the COND_EXPR may be used
4650 as an lvalue in the C++ front-end. PR c++/19199. */
4651 && (in_gimple_form
4652 || (strcmp (lang_hooks.name, "GNU C++") != 0
4653 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4654 || ! maybe_lvalue_p (arg1)
4655 || ! maybe_lvalue_p (arg2)))
4657 tree comp_op0 = arg00;
4658 tree comp_op1 = arg01;
4659 tree comp_type = TREE_TYPE (comp_op0);
4661 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4662 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4664 comp_type = type;
4665 comp_op0 = arg1;
4666 comp_op1 = arg2;
4669 switch (comp_code)
4671 case EQ_EXPR:
4672 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4673 case NE_EXPR:
4674 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4675 case LE_EXPR:
4676 case LT_EXPR:
4677 case UNLE_EXPR:
4678 case UNLT_EXPR:
4679 /* In C++ a ?: expression can be an lvalue, so put the
4680 operand which will be used if they are equal first
4681 so that we can convert this back to the
4682 corresponding COND_EXPR. */
4683 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4685 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4686 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4687 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4688 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4689 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4690 comp_op1, comp_op0);
4691 return pedantic_non_lvalue_loc (loc,
4692 fold_convert_loc (loc, type, tem));
4694 break;
4695 case GE_EXPR:
4696 case GT_EXPR:
4697 case UNGE_EXPR:
4698 case UNGT_EXPR:
4699 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4701 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4702 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4703 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4704 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4705 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4706 comp_op1, comp_op0);
4707 return pedantic_non_lvalue_loc (loc,
4708 fold_convert_loc (loc, type, tem));
4710 break;
4711 case UNEQ_EXPR:
4712 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4713 return pedantic_non_lvalue_loc (loc,
4714 fold_convert_loc (loc, type, arg2));
4715 break;
4716 case LTGT_EXPR:
4717 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4718 return pedantic_non_lvalue_loc (loc,
4719 fold_convert_loc (loc, type, arg1));
4720 break;
4721 default:
4722 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4723 break;
4727 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4728 we might still be able to simplify this. For example,
4729 if C1 is one less or one more than C2, this might have started
4730 out as a MIN or MAX and been transformed by this function.
4731 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4733 if (INTEGRAL_TYPE_P (type)
4734 && TREE_CODE (arg01) == INTEGER_CST
4735 && TREE_CODE (arg2) == INTEGER_CST)
4736 switch (comp_code)
4738 case EQ_EXPR:
4739 if (TREE_CODE (arg1) == INTEGER_CST)
4740 break;
4741 /* We can replace A with C1 in this case. */
4742 arg1 = fold_convert_loc (loc, type, arg01);
4743 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4745 case LT_EXPR:
4746 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4747 MIN_EXPR, to preserve the signedness of the comparison. */
4748 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4749 OEP_ONLY_CONST)
4750 && operand_equal_p (arg01,
4751 const_binop (PLUS_EXPR, arg2,
4752 build_int_cst (type, 1)),
4753 OEP_ONLY_CONST))
4755 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4756 fold_convert_loc (loc, TREE_TYPE (arg00),
4757 arg2));
4758 return pedantic_non_lvalue_loc (loc,
4759 fold_convert_loc (loc, type, tem));
4761 break;
4763 case LE_EXPR:
4764 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4765 as above. */
4766 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4767 OEP_ONLY_CONST)
4768 && operand_equal_p (arg01,
4769 const_binop (MINUS_EXPR, arg2,
4770 build_int_cst (type, 1)),
4771 OEP_ONLY_CONST))
4773 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4774 fold_convert_loc (loc, TREE_TYPE (arg00),
4775 arg2));
4776 return pedantic_non_lvalue_loc (loc,
4777 fold_convert_loc (loc, type, tem));
4779 break;
4781 case GT_EXPR:
4782 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4783 MAX_EXPR, to preserve the signedness of the comparison. */
4784 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4785 OEP_ONLY_CONST)
4786 && operand_equal_p (arg01,
4787 const_binop (MINUS_EXPR, arg2,
4788 build_int_cst (type, 1)),
4789 OEP_ONLY_CONST))
4791 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4792 fold_convert_loc (loc, TREE_TYPE (arg00),
4793 arg2));
4794 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4796 break;
4798 case GE_EXPR:
4799 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4800 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4801 OEP_ONLY_CONST)
4802 && operand_equal_p (arg01,
4803 const_binop (PLUS_EXPR, arg2,
4804 build_int_cst (type, 1)),
4805 OEP_ONLY_CONST))
4807 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4808 fold_convert_loc (loc, TREE_TYPE (arg00),
4809 arg2));
4810 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4812 break;
4813 case NE_EXPR:
4814 break;
4815 default:
4816 gcc_unreachable ();
4819 return NULL_TREE;
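/* Example of the constant cases above: "A < 6 ? A : 5" has
   C1 == C2 + 1, so the LT_EXPR arm rebuilds it as MIN_EXPR (A, 5);
   symmetrically, "A > 4 ? A : 5" satisfies C1 == C2 - 1 and becomes
   MAX_EXPR (A, 5).  */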
4824 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4825 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4826 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4827 false) >= 2)
4828 #endif
4830 /* EXP is some logical combination of boolean tests. See if we can
4831 merge it into some range test. Return the new tree if so. */
4833 static tree
4834 fold_range_test (location_t loc, enum tree_code code, tree type,
4835 tree op0, tree op1)
4837 int or_op = (code == TRUTH_ORIF_EXPR
4838 || code == TRUTH_OR_EXPR);
4839 int in0_p, in1_p, in_p;
4840 tree low0, low1, low, high0, high1, high;
4841 bool strict_overflow_p = false;
4842 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4843 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4844 tree tem;
4845 const char * const warnmsg = G_("assuming signed overflow does not occur "
4846 "when simplifying range test");
4848 /* If this is an OR operation, invert both sides; we will invert
4849 again at the end. */
4850 if (or_op)
4851 in0_p = ! in0_p, in1_p = ! in1_p;
4853 /* If both expressions are the same, if we can merge the ranges, and we
4854 can build the range test, return it or it inverted. If one of the
4855 ranges is always true or always false, consider it to be the same
4856 expression as the other. */
4857 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4858 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4859 in1_p, low1, high1)
4860 && 0 != (tem = (build_range_check (loc, type,
4861 lhs != 0 ? lhs
4862 : rhs != 0 ? rhs : integer_zero_node,
4863 in_p, low, high))))
4865 if (strict_overflow_p)
4866 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4867 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4870 /* On machines where the branch cost is expensive, if this is a
4871 short-circuited branch and the underlying object on both sides
4872 is the same, make a non-short-circuit operation. */
4873 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4874 && lhs != 0 && rhs != 0
4875 && (code == TRUTH_ANDIF_EXPR
4876 || code == TRUTH_ORIF_EXPR)
4877 && operand_equal_p (lhs, rhs, 0))
4879 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4880 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4881 which case we can't do this. */
4882 if (simple_operand_p (lhs))
4883 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4884 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4885 type, op0, op1);
4887 else if (!lang_hooks.decls.global_bindings_p ()
4888 && !CONTAINS_PLACEHOLDER_P (lhs))
4890 tree common = save_expr (lhs);
4892 if (0 != (lhs = build_range_check (loc, type, common,
4893 or_op ? ! in0_p : in0_p,
4894 low0, high0))
4895 && (0 != (rhs = build_range_check (loc, type, common,
4896 or_op ? ! in1_p : in1_p,
4897 low1, high1))))
4899 if (strict_overflow_p)
4900 fold_overflow_warning (warnmsg,
4901 WARN_STRICT_OVERFLOW_COMPARISON);
4902 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4903 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4904 type, lhs, rhs);
4909 return 0;
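/* Example: for "X < 2 || X > 5" both operands become ranges on X; the
   initial inversion for the OR turns them into + [2, -] and + [-, 5],
   merge_ranges combines those to + [2, 5], and the final inversion
   yields the single test "(unsigned) (X - 2) > 3".  */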
4912 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4913 bit value. Arrange things so the extra bits will be set to zero if and
4914 only if C is sign-extended to its full width. If MASK is nonzero,
4915 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4917 static tree
4918 unextend (tree c, int p, int unsignedp, tree mask)
4920 tree type = TREE_TYPE (c);
4921 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4922 tree temp;
4924 if (p == modesize || unsignedp)
4925 return c;
4927 /* We work by getting just the sign bit into the low-order bit, then
4928 into the high-order bit, then sign-extend. We then XOR that value
4929 with C. */
4930 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4931 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4933 /* We must use a signed type in order to get an arithmetic right shift.
4934 However, we must also avoid introducing accidental overflows, so that
4935 a subsequent call to integer_zerop will work. Hence we must
4936 do the type conversion here. At this point, the constant is either
4937 zero or one, and the conversion to a signed type can never overflow.
4938 We could get an overflow if this conversion is done anywhere else. */
4939 if (TYPE_UNSIGNED (type))
4940 temp = fold_convert (signed_type_for (type), temp);
4942 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4943 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4944 if (mask != 0)
4945 temp = const_binop (BIT_AND_EXPR, temp,
4946 fold_convert (TREE_TYPE (c), mask));
4947 /* If necessary, convert the type back to match the type of C. */
4948 if (TYPE_UNSIGNED (type))
4949 temp = fold_convert (type, temp);
4951 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
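/* A worked example, assuming P == 8 in a 32-bit mode with MASK == 0:
   for the sign-extended constant C == 0xfffffffb (-5), TEMP becomes
   0xffffff00 and C ^ TEMP == 0xfb, leaving the extra bits zero; for
   the zero-extended C == 0xfb the same XOR sets the extra bits, as
   the comment above requires.  */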
4954 /* For an expression that has the form
4955 (A && B) || ~B
4956 or
4957 (A || B) && ~B,
4958 we can drop one of the inner expressions and simplify to
4959 A || ~B
4960 or
4961 A && ~B
4962 LOC is the location of the resulting expression. OP is the inner
4963 logical operation; the left-hand side in the examples above, while CMPOP
4964 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4965 removing a condition that guards another, as in
4966 (A != NULL && A->...) || A == NULL
4967 which we must not transform. If RHS_ONLY is true, only eliminate the
4968 right-most operand of the inner logical operation. */
4970 static tree
4971 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4972 bool rhs_only)
4974 tree type = TREE_TYPE (cmpop);
4975 enum tree_code code = TREE_CODE (cmpop);
4976 enum tree_code truthop_code = TREE_CODE (op);
4977 tree lhs = TREE_OPERAND (op, 0);
4978 tree rhs = TREE_OPERAND (op, 1);
4979 tree orig_lhs = lhs, orig_rhs = rhs;
4980 enum tree_code rhs_code = TREE_CODE (rhs);
4981 enum tree_code lhs_code = TREE_CODE (lhs);
4982 enum tree_code inv_code;
4984 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4985 return NULL_TREE;
4987 if (TREE_CODE_CLASS (code) != tcc_comparison)
4988 return NULL_TREE;
4990 if (rhs_code == truthop_code)
4992 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4993 if (newrhs != NULL_TREE)
4995 rhs = newrhs;
4996 rhs_code = TREE_CODE (rhs);
4999 if (lhs_code == truthop_code && !rhs_only)
5001 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5002 if (newlhs != NULL_TREE)
5004 lhs = newlhs;
5005 lhs_code = TREE_CODE (lhs);
5009 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5010 if (inv_code == rhs_code
5011 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5012 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5013 return lhs;
5014 if (!rhs_only && inv_code == lhs_code
5015 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5016 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5017 return rhs;
5018 if (rhs != orig_rhs || lhs != orig_lhs)
5019 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5020 lhs, rhs);
5021 return NULL_TREE;
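/* Example: in "(X > 0 && Y > 0) || Y <= 0" the inverse of CMPOP
   (Y <= 0) is Y > 0, which matches the right arm of the inner AND, so
   that arm is dropped and the caller can fold the whole expression to
   "X > 0 || Y <= 0".  */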
5024 /* Find ways of folding logical expressions of LHS and RHS:
5025 Try to merge two comparisons to the same innermost item.
5026 Look for range tests like "ch >= '0' && ch <= '9'".
5027 Look for combinations of simple terms on machines with expensive branches
5028 and evaluate the RHS unconditionally.
5030 For example, if we have p->a == 2 && p->b == 4 and we can make an
5031 object large enough to span both A and B, we can do this with a comparison
5032 against the object ANDed with the a mask.
5034 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5035 operations to do this with one comparison.
5037 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5038 function and the one above.
5040 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5041 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5043 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5044 two operands.
5046 We return the simplified tree or 0 if no optimization is possible. */
5048 static tree
5049 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5050 tree lhs, tree rhs)
5052 /* If this is the "or" of two comparisons, we can do something if
5053 the comparisons are NE_EXPR. If this is the "and", we can do something
5054 if the comparisons are EQ_EXPR. I.e.,
5055 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5057 WANTED_CODE is this operation code. For single bit fields, we can
5058 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5059 comparison for one-bit fields. */
5061 enum tree_code wanted_code;
5062 enum tree_code lcode, rcode;
5063 tree ll_arg, lr_arg, rl_arg, rr_arg;
5064 tree ll_inner, lr_inner, rl_inner, rr_inner;
5065 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5066 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5067 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5068 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5069 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5070 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5071 enum machine_mode lnmode, rnmode;
5072 tree ll_mask, lr_mask, rl_mask, rr_mask;
5073 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5074 tree l_const, r_const;
5075 tree lntype, rntype, result;
5076 HOST_WIDE_INT first_bit, end_bit;
5077 int volatilep;
5079 /* Start by getting the comparison codes. Fail if anything is volatile.
5080 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5081 it were surrounded with a NE_EXPR. */
5083 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5084 return 0;
5086 lcode = TREE_CODE (lhs);
5087 rcode = TREE_CODE (rhs);
5089 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5091 lhs = build2 (NE_EXPR, truth_type, lhs,
5092 build_int_cst (TREE_TYPE (lhs), 0));
5093 lcode = NE_EXPR;
5096 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5098 rhs = build2 (NE_EXPR, truth_type, rhs,
5099 build_int_cst (TREE_TYPE (rhs), 0));
5100 rcode = NE_EXPR;
5103 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5104 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5105 return 0;
5107 ll_arg = TREE_OPERAND (lhs, 0);
5108 lr_arg = TREE_OPERAND (lhs, 1);
5109 rl_arg = TREE_OPERAND (rhs, 0);
5110 rr_arg = TREE_OPERAND (rhs, 1);
5112 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5113 if (simple_operand_p (ll_arg)
5114 && simple_operand_p (lr_arg))
5116 if (operand_equal_p (ll_arg, rl_arg, 0)
5117 && operand_equal_p (lr_arg, rr_arg, 0))
5119 result = combine_comparisons (loc, code, lcode, rcode,
5120 truth_type, ll_arg, lr_arg);
5121 if (result)
5122 return result;
5124 else if (operand_equal_p (ll_arg, rr_arg, 0)
5125 && operand_equal_p (lr_arg, rl_arg, 0))
5127 result = combine_comparisons (loc, code, lcode,
5128 swap_tree_comparison (rcode),
5129 truth_type, ll_arg, lr_arg);
5130 if (result)
5131 return result;
5135 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5136 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5138 /* If the RHS can be evaluated unconditionally and its operands are
5139 simple, it wins to evaluate the RHS unconditionally on machines
5140 with expensive branches. In this case, this isn't a comparison
5141 that can be merged. */
5143 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5144 false) >= 2
5145 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5146 && simple_operand_p (rl_arg)
5147 && simple_operand_p (rr_arg))
5149 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5150 if (code == TRUTH_OR_EXPR
5151 && lcode == NE_EXPR && integer_zerop (lr_arg)
5152 && rcode == NE_EXPR && integer_zerop (rr_arg)
5153 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5154 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5155 return build2_loc (loc, NE_EXPR, truth_type,
5156 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5157 ll_arg, rl_arg),
5158 build_int_cst (TREE_TYPE (ll_arg), 0));
5160 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5161 if (code == TRUTH_AND_EXPR
5162 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5163 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5164 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5165 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5166 return build2_loc (loc, EQ_EXPR, truth_type,
5167 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5168 ll_arg, rl_arg),
5169 build_int_cst (TREE_TYPE (ll_arg), 0));
5172 /* See if the comparisons can be merged. Then get all the parameters for
5173 each side. */
5175 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5176 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5177 return 0;
5179 volatilep = 0;
5180 ll_inner = decode_field_reference (loc, ll_arg,
5181 &ll_bitsize, &ll_bitpos, &ll_mode,
5182 &ll_unsignedp, &volatilep, &ll_mask,
5183 &ll_and_mask);
5184 lr_inner = decode_field_reference (loc, lr_arg,
5185 &lr_bitsize, &lr_bitpos, &lr_mode,
5186 &lr_unsignedp, &volatilep, &lr_mask,
5187 &lr_and_mask);
5188 rl_inner = decode_field_reference (loc, rl_arg,
5189 &rl_bitsize, &rl_bitpos, &rl_mode,
5190 &rl_unsignedp, &volatilep, &rl_mask,
5191 &rl_and_mask);
5192 rr_inner = decode_field_reference (loc, rr_arg,
5193 &rr_bitsize, &rr_bitpos, &rr_mode,
5194 &rr_unsignedp, &volatilep, &rr_mask,
5195 &rr_and_mask);
5197 /* The inner operation on the lhs of each
5198 comparison must be the same if we are to be able to do anything.
5199 Then see if we have constants. If not, the same must be true for
5200 the rhs's. */
5201 if (volatilep || ll_inner == 0 || rl_inner == 0
5202 || ! operand_equal_p (ll_inner, rl_inner, 0))
5203 return 0;
5205 if (TREE_CODE (lr_arg) == INTEGER_CST
5206 && TREE_CODE (rr_arg) == INTEGER_CST)
5207 l_const = lr_arg, r_const = rr_arg;
5208 else if (lr_inner == 0 || rr_inner == 0
5209 || ! operand_equal_p (lr_inner, rr_inner, 0))
5210 return 0;
5211 else
5212 l_const = r_const = 0;
5214 /* If either comparison code is not correct for our logical operation,
5215 fail. However, we can convert a one-bit comparison against zero into
5216 the opposite comparison against that bit being set in the field. */
5218 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5219 if (lcode != wanted_code)
5221 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5223 /* Make the left operand unsigned, since we are only interested
5224 in the value of one bit. Otherwise we are doing the wrong
5225 thing below. */
5226 ll_unsignedp = 1;
5227 l_const = ll_mask;
5229 else
5230 return 0;
5233 /* This is analogous to the code for l_const above. */
5234 if (rcode != wanted_code)
5236 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5238 rl_unsignedp = 1;
5239 r_const = rl_mask;
5241 else
5242 return 0;
5245 /* See if we can find a mode that contains both fields being compared on
5246 the left. If we can't, fail. Otherwise, update all constants and masks
5247 to be relative to a field of that size. */
5248 first_bit = MIN (ll_bitpos, rl_bitpos);
5249 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5250 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5251 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5252 volatilep);
5253 if (lnmode == VOIDmode)
5254 return 0;
5256 lnbitsize = GET_MODE_BITSIZE (lnmode);
5257 lnbitpos = first_bit & ~ (lnbitsize - 1);
5258 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5259 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5261 if (BYTES_BIG_ENDIAN)
5263 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5264 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5267 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5268 size_int (xll_bitpos));
5269 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5270 size_int (xrl_bitpos));
5272 if (l_const)
5274 l_const = fold_convert_loc (loc, lntype, l_const);
5275 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5276 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5277 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5278 fold_build1_loc (loc, BIT_NOT_EXPR,
5279 lntype, ll_mask))))
5281 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5283 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5286 if (r_const)
5288 r_const = fold_convert_loc (loc, lntype, r_const);
5289 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5290 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5291 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5292 fold_build1_loc (loc, BIT_NOT_EXPR,
5293 lntype, rl_mask))))
5295 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5297 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5301 /* If the right sides are not constant, do the same for them. Also,
5302 disallow this optimization if a size or signedness mismatch occurs
5303 between the left and right sides. */
5304 if (l_const == 0)
5306 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5307 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5308 /* Make sure the two fields on the right
5309 correspond to the left without being swapped. */
5310 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5311 return 0;
5313 first_bit = MIN (lr_bitpos, rr_bitpos);
5314 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5315 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5316 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5317 volatilep);
5318 if (rnmode == VOIDmode)
5319 return 0;
5321 rnbitsize = GET_MODE_BITSIZE (rnmode);
5322 rnbitpos = first_bit & ~ (rnbitsize - 1);
5323 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5324 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5326 if (BYTES_BIG_ENDIAN)
5328 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5329 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5332 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5333 rntype, lr_mask),
5334 size_int (xlr_bitpos));
5335 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5336 rntype, rr_mask),
5337 size_int (xrr_bitpos));
5339 /* Make a mask that corresponds to both fields being compared.
5340 Do this for both items being compared. If the operands are the
5341 same size and the bits being compared are in the same position
5342 then we can do this by masking both and comparing the masked
5343 results. */
5344 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5345 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5346 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5348 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5349 ll_unsignedp || rl_unsignedp);
5350 if (! all_ones_mask_p (ll_mask, lnbitsize))
5351 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5353 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5354 lr_unsignedp || rr_unsignedp);
5355 if (! all_ones_mask_p (lr_mask, rnbitsize))
5356 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5358 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5361 /* There is still another way we can do something: If both pairs of
5362 fields being compared are adjacent, we may be able to make a wider
5363 field containing them both.
5365 Note that we still must mask the lhs/rhs expressions. Furthermore,
5366 the mask must be shifted to account for the shift done by
5367 make_bit_field_ref. */
5368 if ((ll_bitsize + ll_bitpos == rl_bitpos
5369 && lr_bitsize + lr_bitpos == rr_bitpos)
5370 || (ll_bitpos == rl_bitpos + rl_bitsize
5371 && lr_bitpos == rr_bitpos + rr_bitsize))
5373 tree type;
5375 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5376 ll_bitsize + rl_bitsize,
5377 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5378 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5379 lr_bitsize + rr_bitsize,
5380 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5382 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5383 size_int (MIN (xll_bitpos, xrl_bitpos)));
5384 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5385 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5387 /* Convert to the smaller type before masking out unwanted bits. */
5388 type = lntype;
5389 if (lntype != rntype)
5391 if (lnbitsize > rnbitsize)
5393 lhs = fold_convert_loc (loc, rntype, lhs);
5394 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5395 type = rntype;
5397 else if (lnbitsize < rnbitsize)
5399 rhs = fold_convert_loc (loc, lntype, rhs);
5400 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5401 type = lntype;
5405 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5406 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5408 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5409 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5411 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5414 return 0;
5417 /* Handle the case of comparisons with constants. If there is something in
5418 common between the masks, those bits of the constants must be the same.
5419 If not, the condition is always false. Test for this to avoid generating
5420 incorrect code below. */
5421 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5422 if (! integer_zerop (result)
5423 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5424 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5426 if (wanted_code == NE_EXPR)
5428 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5429 return constant_boolean_node (true, truth_type);
5431 else
5433 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5434 return constant_boolean_node (false, truth_type);
5438 /* Construct the expression we will return. First get the component
5439 reference we will make. Unless the mask is all ones the width of
5440 that field, perform the mask operation. Then compare with the
5441 merged constant. */
5442 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5443 ll_unsignedp || rl_unsignedp);
5445 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5446 if (! all_ones_mask_p (ll_mask, lnbitsize))
5447 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5449 return build2_loc (loc, wanted_code, truth_type, result,
5450 const_binop (BIT_IOR_EXPR, l_const, r_const));
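/* Worked example of the constant case: for a struct with adjacent
   unsigned char fields a and b, "p->a == 2 && p->b == 4" can be done
   by loading the 16-bit word containing both fields, masking it if
   some bits do not participate, and comparing against the merged
   constant, the BIT_IOR of 2 and 4 shifted to their bit positions,
   exactly as built by the final build2_loc above.  */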
5453 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5454 constant. */
5456 static tree
5457 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5458 tree op0, tree op1)
5460 tree arg0 = op0;
5461 enum tree_code op_code;
5462 tree comp_const;
5463 tree minmax_const;
5464 int consts_equal, consts_lt;
5465 tree inner;
5467 STRIP_SIGN_NOPS (arg0);
5469 op_code = TREE_CODE (arg0);
5470 minmax_const = TREE_OPERAND (arg0, 1);
5471 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5472 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5473 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5474 inner = TREE_OPERAND (arg0, 0);
5476 /* If something does not permit us to optimize, return NULL_TREE. */
5477 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5478 || TREE_CODE (comp_const) != INTEGER_CST
5479 || TREE_OVERFLOW (comp_const)
5480 || TREE_CODE (minmax_const) != INTEGER_CST
5481 || TREE_OVERFLOW (minmax_const))
5482 return NULL_TREE;
5484 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5485 and GT_EXPR, doing the rest with recursive calls using logical
5486 simplifications. */
5487 switch (code)
5489 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5491 tree tem
5492 = optimize_minmax_comparison (loc,
5493 invert_tree_comparison (code, false),
5494 type, op0, op1);
5495 if (tem)
5496 return invert_truthvalue_loc (loc, tem);
5497 return NULL_TREE;
5500 case GE_EXPR:
5501 return
5502 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5503 optimize_minmax_comparison
5504 (loc, EQ_EXPR, type, arg0, comp_const),
5505 optimize_minmax_comparison
5506 (loc, GT_EXPR, type, arg0, comp_const));
5508 case EQ_EXPR:
5509 if (op_code == MAX_EXPR && consts_equal)
5510 /* MAX (X, 0) == 0 -> X <= 0 */
5511 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5513 else if (op_code == MAX_EXPR && consts_lt)
5514 /* MAX (X, 0) == 5 -> X == 5 */
5515 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5517 else if (op_code == MAX_EXPR)
5518 /* MAX (X, 0) == -1 -> false */
5519 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5521 else if (consts_equal)
5522 /* MIN (X, 0) == 0 -> X >= 0 */
5523 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5525 else if (consts_lt)
5526 /* MIN (X, 0) == 5 -> false */
5527 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5529 else
5530 /* MIN (X, 0) == -1 -> X == -1 */
5531 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5533 case GT_EXPR:
5534 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5535 /* MAX (X, 0) > 0 -> X > 0
5536 MAX (X, 0) > 5 -> X > 5 */
5537 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5539 else if (op_code == MAX_EXPR)
5540 /* MAX (X, 0) > -1 -> true */
5541 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5543 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5544 /* MIN (X, 0) > 0 -> false
5545 MIN (X, 0) > 5 -> false */
5546 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5548 else
5549 /* MIN (X, 0) > -1 -> X > -1 */
5550 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5552 default:
5553 return NULL_TREE;
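/* Editorial sketch (not part of GCC) of the MIN/MAX folds above,
   restated at the C level; both identities hold for every int x.  */
static int
demo_minmax_compare (int x)
{
  int mx = x > 0 ? x : 0;   /* MAX (x, 0) */
  int mn = x < 0 ? x : 0;   /* MIN (x, 0) */
  return ((mx == 0) == (x <= 0))      /* MAX (X, 0) == 0  ->  X <= 0 */
         && ((mn > -1) == (x > -1));  /* MIN (X, 0) > -1  ->  X > -1 */
}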
5557 /* T is an integer expression that is being multiplied, divided, or taken a
5558 modulus (CODE says which and what kind of divide or modulus) by a
5559 constant C. See if we can eliminate that operation by folding it with
5560 other operations already in T. WIDE_TYPE, if non-null, is a type that
5561 should be used for the computation if wider than our type.
5563 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5564 (X * 2) + (Y * 4). We must, however, be assured that either the original
5565 expression would not overflow or that overflow is undefined for the type
5566 in the language in question.
5568 If we return a non-null expression, it is an equivalent form of the
5569 original computation, but need not be in the original type.
5571 We set *STRICT_OVERFLOW_P to true if the return value depends on
5572 signed overflow being undefined. Otherwise we do not change
5573 *STRICT_OVERFLOW_P. */
5575 static tree
5576 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5577 bool *strict_overflow_p)
5579 /* To avoid exponential search depth, refuse to allow recursion past
5580 three levels. Beyond that (1) it's highly unlikely that we'll find
5581 something interesting and (2) we've probably processed it before
5582 when we built the inner expression. */
5584 static int depth;
5585 tree ret;
5587 if (depth > 3)
5588 return NULL;
5590 depth++;
5591 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5592 depth--;
5594 return ret;
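/* Editorial sketch (not part of GCC) of the worked example in the
   comment above extract_muldiv: dividing (X * 8) + (Y * 16) by 4
   yields (X * 2) + (Y * 4), provided the original expression does
   not overflow.  */
static int
demo_extract_muldiv (long x, long y)
{
  return ((x * 8 + y * 16) / 4) == (x * 2 + y * 4);   /* 1 absent overflow */
}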
5597 static tree
5598 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5599 bool *strict_overflow_p)
5601 tree type = TREE_TYPE (t);
5602 enum tree_code tcode = TREE_CODE (t);
5603 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5604 > GET_MODE_SIZE (TYPE_MODE (type)))
5605 ? wide_type : type);
5606 tree t1, t2;
5607 int same_p = tcode == code;
5608 tree op0 = NULL_TREE, op1 = NULL_TREE;
5609 bool sub_strict_overflow_p;
5611 /* Don't deal with constants of zero here; they confuse the code below. */
5612 if (integer_zerop (c))
5613 return NULL_TREE;
5615 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5616 op0 = TREE_OPERAND (t, 0);
5618 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5619 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5621 /* Note that we need not handle conditional operations here since fold
5622 already handles those cases. So just do arithmetic here. */
5623 switch (tcode)
5625 case INTEGER_CST:
5626 /* For a constant, we can always simplify if we are a multiply
5627 or (for divide and modulus) if it is a multiple of our constant. */
5628 if (code == MULT_EXPR
5629 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5630 return const_binop (code, fold_convert (ctype, t),
5631 fold_convert (ctype, c));
5632 break;
5634 CASE_CONVERT: case NON_LVALUE_EXPR:
5635 /* If op0 is an expression ... */
5636 if ((COMPARISON_CLASS_P (op0)
5637 || UNARY_CLASS_P (op0)
5638 || BINARY_CLASS_P (op0)
5639 || VL_EXP_CLASS_P (op0)
5640 || EXPRESSION_CLASS_P (op0))
5641 /* ... and has wrapping overflow, and its type is smaller
5642 than ctype, then we cannot pass through as widening. */
5643 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5644 && (TYPE_PRECISION (ctype)
5645 > TYPE_PRECISION (TREE_TYPE (op0))))
5646 /* ... or this is a truncation (t is narrower than op0),
5647 then we cannot pass through this narrowing. */
5648 || (TYPE_PRECISION (type)
5649 < TYPE_PRECISION (TREE_TYPE (op0)))
5650 /* ... or signedness changes for division or modulus,
5651 then we cannot pass through this conversion. */
5652 || (code != MULT_EXPR
5653 && (TYPE_UNSIGNED (ctype)
5654 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5655 /* ... or has undefined overflow while the type it is converted
5656 to has not, then we cannot do the operation in the inner type
5657 as that would introduce undefined overflow. */
5658 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5659 && !TYPE_OVERFLOW_UNDEFINED (type))))
5660 break;
5662 /* Pass the constant down and see if we can make a simplification. If
5663 we can, replace this expression with the inner simplification for
5664 possible later conversion to our or some other type. */
5665 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5666 && TREE_CODE (t2) == INTEGER_CST
5667 && !TREE_OVERFLOW (t2)
5668 && (0 != (t1 = extract_muldiv (op0, t2, code,
5669 code == MULT_EXPR
5670 ? ctype : NULL_TREE,
5671 strict_overflow_p))))
5672 return t1;
5673 break;
5675 case ABS_EXPR:
5676 /* If widening the type changes it from signed to unsigned, then we
5677 must avoid building ABS_EXPR itself as unsigned. */
5678 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5680 tree cstype = (*signed_type_for) (ctype);
5681 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5682 != 0)
5684 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5685 return fold_convert (ctype, t1);
5687 break;
5689 /* If the constant is negative, we cannot simplify this. */
5690 if (tree_int_cst_sgn (c) == -1)
5691 break;
5692 /* FALLTHROUGH */
5693 case NEGATE_EXPR:
5694 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5695 != 0)
5696 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5697 break;
5699 case MIN_EXPR: case MAX_EXPR:
5700 /* If widening the type changes the signedness, then we can't perform
5701 this optimization as that changes the result. */
5702 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5703 break;
5705 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5706 sub_strict_overflow_p = false;
5707 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5708 &sub_strict_overflow_p)) != 0
5709 && (t2 = extract_muldiv (op1, c, code, wide_type,
5710 &sub_strict_overflow_p)) != 0)
5712 if (tree_int_cst_sgn (c) < 0)
5713 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5714 if (sub_strict_overflow_p)
5715 *strict_overflow_p = true;
5716 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5717 fold_convert (ctype, t2));
5719 break;
5721 case LSHIFT_EXPR: case RSHIFT_EXPR:
5722 /* If the second operand is constant, this is a multiplication
5723 or floor division by a power of two, so we can treat it that
5724 way unless the multiplier or divisor overflows. Signed
5725 left-shift overflow is implementation-defined rather than
5726 undefined in C90, so do not convert signed left shift into
5727 multiplication. */
5728 if (TREE_CODE (op1) == INTEGER_CST
5729 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5730 /* const_binop may not detect overflow correctly,
5731 so check for it explicitly here. */
5732 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5733 && TREE_INT_CST_HIGH (op1) == 0
5734 && 0 != (t1 = fold_convert (ctype,
5735 const_binop (LSHIFT_EXPR,
5736 size_one_node,
5737 op1)))
5738 && !TREE_OVERFLOW (t1))
5739 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5740 ? MULT_EXPR : FLOOR_DIV_EXPR,
5741 ctype,
5742 fold_convert (ctype, op0),
5743 t1),
5744 c, code, wide_type, strict_overflow_p);
5745 break;
5747 case PLUS_EXPR: case MINUS_EXPR:
5748 /* See if we can eliminate the operation on both sides. If we can, we
5749 can return a new PLUS or MINUS. If we can't, the only remaining
5750 cases where we can do anything are if the second operand is a
5751 constant. */
5752 sub_strict_overflow_p = false;
5753 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5754 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5755 if (t1 != 0 && t2 != 0
5756 && (code == MULT_EXPR
5757 /* If not multiplication, we can only do this if both operands
5758 are divisible by c. */
5759 || (multiple_of_p (ctype, op0, c)
5760 && multiple_of_p (ctype, op1, c))))
5762 if (sub_strict_overflow_p)
5763 *strict_overflow_p = true;
5764 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5765 fold_convert (ctype, t2));
5768 /* If this was a subtraction, negate OP1 and set it to be an addition.
5769 This simplifies the logic below. */
5770 if (tcode == MINUS_EXPR)
5772 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5773 /* If OP1 was not easily negatable, the constant may be OP0. */
5774 if (TREE_CODE (op0) == INTEGER_CST)
5776 tree tem = op0;
5777 op0 = op1;
5778 op1 = tem;
5779 tem = t1;
5780 t1 = t2;
5781 t2 = tem;
5785 if (TREE_CODE (op1) != INTEGER_CST)
5786 break;
5788 /* If either OP1 or C are negative, this optimization is not safe for
5789 some of the division and remainder types while for others we need
5790 to change the code. */
5791 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5793 if (code == CEIL_DIV_EXPR)
5794 code = FLOOR_DIV_EXPR;
5795 else if (code == FLOOR_DIV_EXPR)
5796 code = CEIL_DIV_EXPR;
5797 else if (code != MULT_EXPR
5798 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5799 break;
5802 /* If it's a multiply or a division/modulus operation of a multiple
5803 of our constant, do the operation and verify it doesn't overflow. */
5804 if (code == MULT_EXPR
5805 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5807 op1 = const_binop (code, fold_convert (ctype, op1),
5808 fold_convert (ctype, c));
5809 /* We allow the constant to overflow with wrapping semantics. */
5810 if (op1 == 0
5811 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5812 break;
5814 else
5815 break;
5817 /* If we have an unsigned type, we cannot widen the operation since it
5818 will change the result if the original computation overflowed. */
5819 if (TYPE_UNSIGNED (ctype) && ctype != type)
5820 break;
5822 /* If we were able to eliminate our operation from the first side,
5823 apply our operation to the second side and reform the PLUS. */
5824 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5825 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5827 /* The last case is if we are a multiply. In that case, we can
5828 apply the distributive law to commute the multiply and addition
5829 if the multiplication of the constants doesn't overflow. */
5830 if (code == MULT_EXPR)
5831 return fold_build2 (tcode, ctype,
5832 fold_build2 (code, ctype,
5833 fold_convert (ctype, op0),
5834 fold_convert (ctype, c)),
5835 op1);
5837 break;
5839 case MULT_EXPR:
5840 /* We have a special case here if we are doing something like
5841 (C * 8) % 4 since we know that's zero. */
5842 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5843 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5844 /* If the multiplication can overflow we cannot optimize this. */
5845 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5846 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5847 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5849 *strict_overflow_p = true;
5850 return omit_one_operand (type, integer_zero_node, op0);
5853 /* ... fall through ... */
5855 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5856 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5857 /* If we can extract our operation from the LHS, do so and return a
5858 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5859 do something only if the second operand is a constant. */
5860 if (same_p
5861 && (t1 = extract_muldiv (op0, c, code, wide_type,
5862 strict_overflow_p)) != 0)
5863 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5864 fold_convert (ctype, op1));
5865 else if (tcode == MULT_EXPR && code == MULT_EXPR
5866 && (t1 = extract_muldiv (op1, c, code, wide_type,
5867 strict_overflow_p)) != 0)
5868 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5869 fold_convert (ctype, t1));
5870 else if (TREE_CODE (op1) != INTEGER_CST)
5871 return 0;
5873 /* If these are the same operation types, we can associate them
5874 assuming no overflow. */
5875 if (tcode == code)
5877 double_int mul;
5878 bool overflow_p;
5879 unsigned prec = TYPE_PRECISION (ctype);
5880 bool uns = TYPE_UNSIGNED (ctype);
5881 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5882 double_int dic = tree_to_double_int (c).ext (prec, uns);
5883 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5884 overflow_p = ((!uns && overflow_p)
5885 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5886 if (!double_int_fits_to_tree_p (ctype, mul)
5887 && ((uns && tcode != MULT_EXPR) || !uns))
5888 overflow_p = 1;
5889 if (!overflow_p)
5890 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5891 double_int_to_tree (ctype, mul));
5894 /* If these operations "cancel" each other, we have the main
5895 optimizations of this pass, which occur when either constant is a
5896 multiple of the other, in which case we replace this with either an
5897 operation of CODE or TCODE.
5899 If we have an unsigned type, we cannot do this since it will change
5900 the result if the original computation overflowed. */
5901 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5902 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5903 || (tcode == MULT_EXPR
5904 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5905 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5906 && code != MULT_EXPR)))
5908 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5910 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5911 *strict_overflow_p = true;
5912 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5913 fold_convert (ctype,
5914 const_binop (TRUNC_DIV_EXPR,
5915 op1, c)));
5917 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5919 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5920 *strict_overflow_p = true;
5921 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5922 fold_convert (ctype,
5923 const_binop (TRUNC_DIV_EXPR,
5924 c, op1)));
5927 break;
5929 default:
5930 break;
5933 return 0;
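/* Editorial sketch (not part of GCC) of the MULT_EXPR case above:
   (x * 8) % 4 folds to 0 because 8 is a multiple of 4; the fold is
   valid only when the multiplication cannot wrap, i.e. when signed
   overflow is undefined for the type.  */
static int
demo_mult_mod (int x)
{
  return (x * 8) % 4;   /* folds to 0 when overflow is undefined */
}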
5936 /* Return a node which has the indicated constant VALUE (either 0 or
5937 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5938 and is of the indicated TYPE. */
5940 tree
5941 constant_boolean_node (bool value, tree type)
5943 if (type == integer_type_node)
5944 return value ? integer_one_node : integer_zero_node;
5945 else if (type == boolean_type_node)
5946 return value ? boolean_true_node : boolean_false_node;
5947 else if (TREE_CODE (type) == VECTOR_TYPE)
5948 return build_vector_from_val (type,
5949 build_int_cst (TREE_TYPE (type),
5950 value ? -1 : 0));
5951 else
5952 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5956 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5957 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5958 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5959 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5960 COND is the first argument to CODE; otherwise (as in the example
5961 given here), it is the second argument. TYPE is the type of the
5962 original expression. Return NULL_TREE if no simplification is
5963 possible. */
5965 static tree
5966 fold_binary_op_with_conditional_arg (location_t loc,
5967 enum tree_code code,
5968 tree type, tree op0, tree op1,
5969 tree cond, tree arg, int cond_first_p)
5971 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5972 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5973 tree test, true_value, false_value;
5974 tree lhs = NULL_TREE;
5975 tree rhs = NULL_TREE;
5976 enum tree_code cond_code = COND_EXPR;
5978 if (TREE_CODE (cond) == COND_EXPR
5979 || TREE_CODE (cond) == VEC_COND_EXPR)
5981 test = TREE_OPERAND (cond, 0);
5982 true_value = TREE_OPERAND (cond, 1);
5983 false_value = TREE_OPERAND (cond, 2);
5984 /* If this operand is a throw expression, then it does not make
5985 sense to try to perform a logical or arithmetic operation
5986 involving it. */
5987 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5988 lhs = true_value;
5989 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5990 rhs = false_value;
5992 else
5994 tree testtype = TREE_TYPE (cond);
5995 test = cond;
5996 true_value = constant_boolean_node (true, testtype);
5997 false_value = constant_boolean_node (false, testtype);
6000 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6001 cond_code = VEC_COND_EXPR;
6003 /* This transformation is only worthwhile if we don't have to wrap ARG
6004 in a SAVE_EXPR and the operation can be simplified without recursing
6005 on at least one of the branches once it's pushed inside the COND_EXPR. */
6006 if (!TREE_CONSTANT (arg)
6007 && (TREE_SIDE_EFFECTS (arg)
6008 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6009 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6010 return NULL_TREE;
6012 arg = fold_convert_loc (loc, arg_type, arg);
6013 if (lhs == 0)
6015 true_value = fold_convert_loc (loc, cond_type, true_value);
6016 if (cond_first_p)
6017 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6018 else
6019 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6021 if (rhs == 0)
6023 false_value = fold_convert_loc (loc, cond_type, false_value);
6024 if (cond_first_p)
6025 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6026 else
6027 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6030 /* Check that we have simplified at least one of the branches. */
6031 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6032 return NULL_TREE;
6034 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
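/* Editorial sketch (not part of GCC) of the distribution above:
   a + (b ? x : y) is rewritten as b ? (a + x) : (a + y), and the
   rewrite is kept only when at least one branch simplifies.  */
static int
demo_distribute_cond (int a, int b, int x, int y)
{
  return (a + (b ? x : y)) == (b ? (a + x) : (a + y));   /* always 1 */
}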
6038 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6040 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6041 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6042 ADDEND is the same as X.
6044 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6045 and finite. The problematic cases are when X is zero, and its mode
6046 has signed zeros. In the case of rounding towards -infinity,
6047 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6048 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6050 bool
6051 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6053 if (!real_zerop (addend))
6054 return false;
6056 /* Don't allow the fold with -fsignaling-nans. */
6057 if (HONOR_SNANS (TYPE_MODE (type)))
6058 return false;
6060 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6061 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6062 return true;
6064 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6065 if (TREE_CODE (addend) == REAL_CST
6066 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6067 negate = !negate;
6069 /* The mode has signed zeros, and we have to honor their sign.
6070 In this situation, there is only one case we can return true for.
6071 X - 0 is the same as X unless rounding towards -infinity is
6072 supported. */
6073 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
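/* Editorial sketch (not part of GCC) of the signed-zero hazard the
   function above guards against: for x == -0.0, x + 0.0 produces
   +0.0 while x - 0.0 preserves -0.0 (round-to-nearest IEEE
   semantics assumed).  */
static int
demo_signed_zero (void)
{
  double x = -0.0;
  double sum = x + 0.0;    /* +0.0: the sign of x is lost */
  double diff = x - 0.0;   /* -0.0: the sign of x is kept */
  return (1.0 / sum > 0.0) && (1.0 / diff < 0.0);   /* 1 under IEEE */
}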
6076 /* Subroutine of fold() that checks comparisons of built-in math
6077 functions against real constants.
6079 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6080 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6081 is the type of the result and ARG0 and ARG1 are the operands of the
6082 comparison. ARG1 must be a TREE_REAL_CST.
6084 The function returns the constant folded tree if a simplification
6085 can be made, and NULL_TREE otherwise. */
6087 static tree
6088 fold_mathfn_compare (location_t loc,
6089 enum built_in_function fcode, enum tree_code code,
6090 tree type, tree arg0, tree arg1)
6092 REAL_VALUE_TYPE c;
6094 if (BUILTIN_SQRT_P (fcode))
6096 tree arg = CALL_EXPR_ARG (arg0, 0);
6097 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6099 c = TREE_REAL_CST (arg1);
6100 if (REAL_VALUE_NEGATIVE (c))
6102 /* sqrt(x) < y is always false, if y is negative. */
6103 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6104 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6106 /* sqrt(x) > y is always true, if y is negative and we
6107 don't care about NaNs, i.e. negative values of x. */
6108 if (code == NE_EXPR || !HONOR_NANS (mode))
6109 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6111 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6112 return fold_build2_loc (loc, GE_EXPR, type, arg,
6113 build_real (TREE_TYPE (arg), dconst0));
6115 else if (code == GT_EXPR || code == GE_EXPR)
6117 REAL_VALUE_TYPE c2;
6119 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6120 real_convert (&c2, mode, &c2);
6122 if (REAL_VALUE_ISINF (c2))
6124 /* sqrt(x) > y is x == +Inf, when y is very large. */
6125 if (HONOR_INFINITIES (mode))
6126 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6127 build_real (TREE_TYPE (arg), c2));
6129 /* sqrt(x) > y is always false, when y is very large
6130 and we don't care about infinities. */
6131 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6134 /* sqrt(x) > c is the same as x > c*c. */
6135 return fold_build2_loc (loc, code, type, arg,
6136 build_real (TREE_TYPE (arg), c2));
6138 else if (code == LT_EXPR || code == LE_EXPR)
6140 REAL_VALUE_TYPE c2;
6142 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6143 real_convert (&c2, mode, &c2);
6145 if (REAL_VALUE_ISINF (c2))
6147 /* sqrt(x) < y is always true, when y is a very large
6148 value and we don't care about NaNs or Infinities. */
6149 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6150 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6152 /* sqrt(x) < y is x != +Inf when y is very large and we
6153 don't care about NaNs. */
6154 if (! HONOR_NANS (mode))
6155 return fold_build2_loc (loc, NE_EXPR, type, arg,
6156 build_real (TREE_TYPE (arg), c2));
6158 /* sqrt(x) < y is x >= 0 when y is very large and we
6159 don't care about Infinities. */
6160 if (! HONOR_INFINITIES (mode))
6161 return fold_build2_loc (loc, GE_EXPR, type, arg,
6162 build_real (TREE_TYPE (arg), dconst0));
6164 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6165 arg = save_expr (arg);
6166 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6167 fold_build2_loc (loc, GE_EXPR, type, arg,
6168 build_real (TREE_TYPE (arg),
6169 dconst0)),
6170 fold_build2_loc (loc, NE_EXPR, type, arg,
6171 build_real (TREE_TYPE (arg),
6172 c2)));
6175 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6176 if (! HONOR_NANS (mode))
6177 return fold_build2_loc (loc, code, type, arg,
6178 build_real (TREE_TYPE (arg), c2));
6180 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6181 arg = save_expr (arg);
6182 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6183 fold_build2_loc (loc, GE_EXPR, type, arg,
6184 build_real (TREE_TYPE (arg),
6185 dconst0)),
6186 fold_build2_loc (loc, code, type, arg,
6187 build_real (TREE_TYPE (arg),
6188 c2)));
6192 return NULL_TREE;
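/* Editorial sketch (not part of GCC) of the shape of the sqrt folds
   above: for nonnegative c, sqrt(x) > c is rewritten as x > c*c.
   Squaring rounds, so agreement is exact only away from rounding
   boundaries; the real code also checks HONOR_NANS/HONOR_INFINITIES
   before folding.  */
static int
demo_sqrt_compare (double x)
{
  extern double sqrt (double);   /* as declared in <math.h> */
  /* With c == 3.0, c*c == 9.0 is exact.  */
  return x >= 0.0 ? (sqrt (x) > 3.0) == (x > 9.0) : 1;
}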
6195 /* Subroutine of fold() that optimizes comparisons against Infinities,
6196 either +Inf or -Inf.
6198 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6199 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6200 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6202 The function returns the constant folded tree if a simplification
6203 can be made, and NULL_TREE otherwise. */
6205 static tree
6206 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6207 tree arg0, tree arg1)
6209 enum machine_mode mode;
6210 REAL_VALUE_TYPE max;
6211 tree temp;
6212 bool neg;
6214 mode = TYPE_MODE (TREE_TYPE (arg0));
6216 /* For negative infinity swap the sense of the comparison. */
6217 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6218 if (neg)
6219 code = swap_tree_comparison (code);
6221 switch (code)
6223 case GT_EXPR:
6224 /* x > +Inf is always false, if we ignore sNaNs. */
6225 if (HONOR_SNANS (mode))
6226 return NULL_TREE;
6227 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6229 case LE_EXPR:
6230 /* x <= +Inf is always true, if we don't care about NaNs. */
6231 if (! HONOR_NANS (mode))
6232 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6234 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6235 arg0 = save_expr (arg0);
6236 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6238 case EQ_EXPR:
6239 case GE_EXPR:
6240 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6241 real_maxval (&max, neg, mode);
6242 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6243 arg0, build_real (TREE_TYPE (arg0), max));
6245 case LT_EXPR:
6246 /* x < +Inf is always equal to x <= DBL_MAX. */
6247 real_maxval (&max, neg, mode);
6248 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6249 arg0, build_real (TREE_TYPE (arg0), max));
6251 case NE_EXPR:
6252 /* x != +Inf is always equal to !(x > DBL_MAX). */
6253 real_maxval (&max, neg, mode);
6254 if (! HONOR_NANS (mode))
6255 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6256 arg0, build_real (TREE_TYPE (arg0), max));
6258 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6259 arg0, build_real (TREE_TYPE (arg0), max));
6260 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6262 default:
6263 break;
6266 return NULL_TREE;
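/* Editorial sketch (not part of GCC) of the LE_EXPR fold above:
   x <= +Inf fails only for NaN, so it is equivalent to the
   self-comparison x == x (IEEE semantics assumed).  */
static int
demo_inf_compare (double x)
{
  double inf = 1.0 / 0.0;           /* +Inf under IEEE arithmetic */
  return (x <= inf) == (x == x);    /* 1 for every x, NaN included */
}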
6269 /* Subroutine of fold() that optimizes comparisons of a division by
6270 a nonzero integer constant against an integer constant, i.e.
6271 X/C1 op C2.
6273 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6274 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6275 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6277 The function returns the constant folded tree if a simplification
6278 can be made, and NULL_TREE otherwise. */
6280 static tree
6281 fold_div_compare (location_t loc,
6282 enum tree_code code, tree type, tree arg0, tree arg1)
6284 tree prod, tmp, hi, lo;
6285 tree arg00 = TREE_OPERAND (arg0, 0);
6286 tree arg01 = TREE_OPERAND (arg0, 1);
6287 double_int val;
6288 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6289 bool neg_overflow;
6290 bool overflow;
6292 /* We have to do this the hard way to detect unsigned overflow.
6293 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6294 val = TREE_INT_CST (arg01)
6295 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6296 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6297 neg_overflow = false;
6299 if (unsigned_p)
6301 tmp = int_const_binop (MINUS_EXPR, arg01,
6302 build_int_cst (TREE_TYPE (arg01), 1));
6303 lo = prod;
6305 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6306 val = TREE_INT_CST (prod)
6307 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6308 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6309 -1, overflow | TREE_OVERFLOW (prod));
6311 else if (tree_int_cst_sgn (arg01) >= 0)
6313 tmp = int_const_binop (MINUS_EXPR, arg01,
6314 build_int_cst (TREE_TYPE (arg01), 1));
6315 switch (tree_int_cst_sgn (arg1))
6317 case -1:
6318 neg_overflow = true;
6319 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6320 hi = prod;
6321 break;
6323 case 0:
6324 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6325 hi = tmp;
6326 break;
6328 case 1:
6329 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6330 lo = prod;
6331 break;
6333 default:
6334 gcc_unreachable ();
6337 else
6339 /* A negative divisor reverses the relational operators. */
6340 code = swap_tree_comparison (code);
6342 tmp = int_const_binop (PLUS_EXPR, arg01,
6343 build_int_cst (TREE_TYPE (arg01), 1));
6344 switch (tree_int_cst_sgn (arg1))
6346 case -1:
6347 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6348 lo = prod;
6349 break;
6351 case 0:
6352 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6353 lo = tmp;
6354 break;
6356 case 1:
6357 neg_overflow = true;
6358 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6359 hi = prod;
6360 break;
6362 default:
6363 gcc_unreachable ();
6367 switch (code)
6369 case EQ_EXPR:
6370 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6371 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6372 if (TREE_OVERFLOW (hi))
6373 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6374 if (TREE_OVERFLOW (lo))
6375 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6376 return build_range_check (loc, type, arg00, 1, lo, hi);
6378 case NE_EXPR:
6379 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6380 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6381 if (TREE_OVERFLOW (hi))
6382 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6383 if (TREE_OVERFLOW (lo))
6384 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6385 return build_range_check (loc, type, arg00, 0, lo, hi);
6387 case LT_EXPR:
6388 if (TREE_OVERFLOW (lo))
6390 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6391 return omit_one_operand_loc (loc, type, tmp, arg00);
6393 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6395 case LE_EXPR:
6396 if (TREE_OVERFLOW (hi))
6398 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6399 return omit_one_operand_loc (loc, type, tmp, arg00);
6401 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6403 case GT_EXPR:
6404 if (TREE_OVERFLOW (hi))
6406 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6407 return omit_one_operand_loc (loc, type, tmp, arg00);
6409 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6411 case GE_EXPR:
6412 if (TREE_OVERFLOW (lo))
6414 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6415 return omit_one_operand_loc (loc, type, tmp, arg00);
6417 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6419 default:
6420 break;
6423 return NULL_TREE;
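/* Editorial sketch (not part of GCC) of the division fold above:
   with truncating unsigned division, x / 4 == 3 becomes the range
   check 12 <= x && x <= 15 (lo = prod, hi = prod + divisor - 1).  */
static int
demo_div_compare (unsigned x)
{
  return (x / 4 == 3) == (x >= 12 && x <= 15);   /* always 1 */
}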
6427 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6428 equality/inequality test, then return a simplified form of the test
6429 using a sign test. Otherwise return NULL. TYPE is the desired
6430 result type. */
6432 static tree
6433 fold_single_bit_test_into_sign_test (location_t loc,
6434 enum tree_code code, tree arg0, tree arg1,
6435 tree result_type)
6437 /* If this is testing a single bit, we can optimize the test. */
6438 if ((code == NE_EXPR || code == EQ_EXPR)
6439 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6440 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6442 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6443 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6444 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6446 if (arg00 != NULL_TREE
6447 /* This is only a win if casting to a signed type is cheap,
6448 i.e. when arg00's type is not a partial mode. */
6449 && TYPE_PRECISION (TREE_TYPE (arg00))
6450 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6452 tree stype = signed_type_for (TREE_TYPE (arg00));
6453 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6454 result_type,
6455 fold_convert_loc (loc, stype, arg00),
6456 build_int_cst (stype, 0));
6460 return NULL_TREE;
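/* Editorial sketch (not part of GCC) of the sign-bit rewrite above:
   when C is the sign bit of A's type, (A & C) != 0 is A < 0 on the
   signed view (a 32-bit two's-complement int is assumed here).  */
static int
demo_sign_bit_test (unsigned a)
{
  return ((a & 0x80000000u) != 0) == ((int) a < 0);
}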
6463 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6464 equality/inequality test, then return a simplified form of
6465 the test using shifts and logical operations. Otherwise return
6466 NULL. TYPE is the desired result type. */
6468 tree
6469 fold_single_bit_test (location_t loc, enum tree_code code,
6470 tree arg0, tree arg1, tree result_type)
6472 /* If this is testing a single bit, we can optimize the test. */
6473 if ((code == NE_EXPR || code == EQ_EXPR)
6474 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6475 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6477 tree inner = TREE_OPERAND (arg0, 0);
6478 tree type = TREE_TYPE (arg0);
6479 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6480 enum machine_mode operand_mode = TYPE_MODE (type);
6481 int ops_unsigned;
6482 tree signed_type, unsigned_type, intermediate_type;
6483 tree tem, one;
6485 /* First, see if we can fold the single bit test into a sign-bit
6486 test. */
6487 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6488 result_type);
6489 if (tem)
6490 return tem;
6492 /* Otherwise we have (A & C) != 0 where C is a single bit,
6493 convert that into ((A >> C2) & 1), where C2 = log2(C).
6494 Similarly for (A & C) == 0. */
6496 /* If INNER is a right shift of a constant and it plus BITNUM does
6497 not overflow, adjust BITNUM and INNER. */
6498 if (TREE_CODE (inner) == RSHIFT_EXPR
6499 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6500 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6501 && bitnum < TYPE_PRECISION (type)
6502 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6503 bitnum - TYPE_PRECISION (type)))
6505 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6506 inner = TREE_OPERAND (inner, 0);
6509 /* If we are going to be able to omit the AND below, we must do our
6510 operations as unsigned. If we must use the AND, we have a choice.
6511 Normally unsigned is faster, but for some machines signed is. */
6512 #ifdef LOAD_EXTEND_OP
6513 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6514 && !flag_syntax_only) ? 0 : 1;
6515 #else
6516 ops_unsigned = 1;
6517 #endif
6519 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6520 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6521 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6522 inner = fold_convert_loc (loc, intermediate_type, inner);
6524 if (bitnum != 0)
6525 inner = build2 (RSHIFT_EXPR, intermediate_type,
6526 inner, size_int (bitnum));
6528 one = build_int_cst (intermediate_type, 1);
6530 if (code == EQ_EXPR)
6531 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6533 /* Put the AND last so it can combine with more things. */
6534 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6536 /* Make sure to return the proper type. */
6537 inner = fold_convert_loc (loc, result_type, inner);
6539 return inner;
6541 return NULL_TREE;
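/* Editorial sketch (not part of GCC) of the shift form above:
   (A & C) != 0 with C == 1 << n becomes (A >> n) & 1, and the
   EQ_EXPR variant XORs in 1 before the final AND.  */
static int
demo_single_bit_test (unsigned a)
{
  unsigned ne = (a >> 3) & 1u;          /* (a & 8) != 0 */
  unsigned eq = ((a >> 3) ^ 1u) & 1u;   /* (a & 8) == 0 */
  return ne == ((a & 8u) != 0) && eq == ((a & 8u) == 0);
}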
6544 /* Check whether we are allowed to reorder operands arg0 and arg1,
6545 such that the evaluation of arg1 occurs before arg0. */
6547 static bool
6548 reorder_operands_p (const_tree arg0, const_tree arg1)
6550 if (! flag_evaluation_order)
6551 return true;
6552 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6553 return true;
6554 return ! TREE_SIDE_EFFECTS (arg0)
6555 && ! TREE_SIDE_EFFECTS (arg1);
6558 /* Test whether it is preferable to swap two operands, ARG0 and
6559 ARG1, for example because ARG0 is an integer constant and ARG1
6560 isn't. If REORDER is true, only recommend swapping if we can
6561 evaluate the operands in reverse order. */
6563 bool
6564 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6566 STRIP_SIGN_NOPS (arg0);
6567 STRIP_SIGN_NOPS (arg1);
6569 if (TREE_CODE (arg1) == INTEGER_CST)
6570 return 0;
6571 if (TREE_CODE (arg0) == INTEGER_CST)
6572 return 1;
6574 if (TREE_CODE (arg1) == REAL_CST)
6575 return 0;
6576 if (TREE_CODE (arg0) == REAL_CST)
6577 return 1;
6579 if (TREE_CODE (arg1) == FIXED_CST)
6580 return 0;
6581 if (TREE_CODE (arg0) == FIXED_CST)
6582 return 1;
6584 if (TREE_CODE (arg1) == COMPLEX_CST)
6585 return 0;
6586 if (TREE_CODE (arg0) == COMPLEX_CST)
6587 return 1;
6589 if (TREE_CONSTANT (arg1))
6590 return 0;
6591 if (TREE_CONSTANT (arg0))
6592 return 1;
6594 if (optimize_function_for_size_p (cfun))
6595 return 0;
6597 if (reorder && flag_evaluation_order
6598 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6599 return 0;
6601 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6602 for commutative and comparison operators. Ensuring a canonical
6603 form allows the optimizers to find additional redundancies without
6604 having to explicitly check for both orderings. */
6605 if (TREE_CODE (arg0) == SSA_NAME
6606 && TREE_CODE (arg1) == SSA_NAME
6607 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6608 return 1;
6610 /* Put SSA_NAMEs last. */
6611 if (TREE_CODE (arg1) == SSA_NAME)
6612 return 0;
6613 if (TREE_CODE (arg0) == SSA_NAME)
6614 return 1;
6616 /* Put variables last. */
6617 if (DECL_P (arg1))
6618 return 0;
6619 if (DECL_P (arg0))
6620 return 1;
6622 return 0;
6625 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6626 ARG0 is extended to a wider type. */
6628 static tree
6629 fold_widened_comparison (location_t loc, enum tree_code code,
6630 tree type, tree arg0, tree arg1)
6632 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6633 tree arg1_unw;
6634 tree shorter_type, outer_type;
6635 tree min, max;
6636 bool above, below;
6638 if (arg0_unw == arg0)
6639 return NULL_TREE;
6640 shorter_type = TREE_TYPE (arg0_unw);
6642 #ifdef HAVE_canonicalize_funcptr_for_compare
6643 /* Disable this optimization if we're casting a function pointer
6644 type on targets that require function pointer canonicalization. */
6645 if (HAVE_canonicalize_funcptr_for_compare
6646 && TREE_CODE (shorter_type) == POINTER_TYPE
6647 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6648 return NULL_TREE;
6649 #endif
6651 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6652 return NULL_TREE;
6654 arg1_unw = get_unwidened (arg1, NULL_TREE);
6656 /* If possible, express the comparison in the shorter mode. */
6657 if ((code == EQ_EXPR || code == NE_EXPR
6658 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6659 && (TREE_TYPE (arg1_unw) == shorter_type
6660 || ((TYPE_PRECISION (shorter_type)
6661 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6662 && (TYPE_UNSIGNED (shorter_type)
6663 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6664 || (TREE_CODE (arg1_unw) == INTEGER_CST
6665 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6666 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6667 && int_fits_type_p (arg1_unw, shorter_type))))
6668 return fold_build2_loc (loc, code, type, arg0_unw,
6669 fold_convert_loc (loc, shorter_type, arg1_unw));
6671 if (TREE_CODE (arg1_unw) != INTEGER_CST
6672 || TREE_CODE (shorter_type) != INTEGER_TYPE
6673 || !int_fits_type_p (arg1_unw, shorter_type))
6674 return NULL_TREE;
6676 /* If we are comparing with an integer that does not fit in the range
6677 of the shorter type, the result is known. */
6678 outer_type = TREE_TYPE (arg1_unw);
6679 min = lower_bound_in_type (outer_type, shorter_type);
6680 max = upper_bound_in_type (outer_type, shorter_type);
6682 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6683 max, arg1_unw));
6684 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6685 arg1_unw, min));
6687 switch (code)
6689 case EQ_EXPR:
6690 if (above || below)
6691 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6692 break;
6694 case NE_EXPR:
6695 if (above || below)
6696 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6697 break;
6699 case LT_EXPR:
6700 case LE_EXPR:
6701 if (above)
6702 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6703 else if (below)
6704 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6706 case GT_EXPR:
6707 case GE_EXPR:
6708 if (above)
6709 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6710 else if (below)
6711 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6713 default:
6714 break;
6717 return NULL_TREE;
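/* Editorial sketch (not part of GCC) of the widened-comparison folds
   above: a constant that fits the narrow type lets the comparison be
   done in that type, while a constant outside its range gives a
   known result.  */
static int
demo_widened_compare (signed char c)
{
  return ((int) c == 42) == (c == 42)   /* 42 fits the narrow type  */
         && ((int) c == 1000) == 0;     /* 1000 exceeds an 8-bit
                                           signed char's range      */
}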
6720 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where the
6721 conversion on ARG0 changes only its signedness. */
6723 static tree
6724 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6725 tree arg0, tree arg1)
6727 tree arg0_inner;
6728 tree inner_type, outer_type;
6730 if (!CONVERT_EXPR_P (arg0))
6731 return NULL_TREE;
6733 outer_type = TREE_TYPE (arg0);
6734 arg0_inner = TREE_OPERAND (arg0, 0);
6735 inner_type = TREE_TYPE (arg0_inner);
6737 #ifdef HAVE_canonicalize_funcptr_for_compare
6738 /* Disable this optimization if we're casting a function pointer
6739 type on targets that require function pointer canonicalization. */
6740 if (HAVE_canonicalize_funcptr_for_compare
6741 && TREE_CODE (inner_type) == POINTER_TYPE
6742 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6743 return NULL_TREE;
6744 #endif
6746 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6747 return NULL_TREE;
6749 if (TREE_CODE (arg1) != INTEGER_CST
6750 && !(CONVERT_EXPR_P (arg1)
6751 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6752 return NULL_TREE;
6754 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6755 && code != NE_EXPR
6756 && code != EQ_EXPR)
6757 return NULL_TREE;
6759 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6760 return NULL_TREE;
6762 if (TREE_CODE (arg1) == INTEGER_CST)
6763 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6764 0, TREE_OVERFLOW (arg1));
6765 else
6766 arg1 = fold_convert_loc (loc, inner_type, arg1);
6768 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
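/* Editorial sketch (not part of GCC) of the sign-change fold above:
   for equality comparisons, a cast that flips only the signedness
   can be dropped by refitting the constant into the inner type.  */
static int
demo_sign_changed_compare (int x)
{
  return ((unsigned) x == 5u) == (x == 5);   /* always 1 */
}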
6771 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6772 the step of the array. Reconstructs s and delta in the case of s *
6773 delta being an integer constant (and thus already folded). ADDR is
6774 the address. OP1 is the multiplicative expression. If the
6775 function succeeds, the new address expression is returned.
6776 Otherwise NULL_TREE is returned. LOC is the location of the
6777 resulting expression. */
6779 static tree
6780 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6782 tree s, delta, step;
6783 tree ref = TREE_OPERAND (addr, 0), pref;
6784 tree ret, pos;
6785 tree itype;
6786 bool mdim = false;
6788 /* Strip the nops that might be added when converting op1 to sizetype. */
6789 STRIP_NOPS (op1);
6791 /* Canonicalize op1 into a possibly non-constant delta
6792 and an INTEGER_CST s. */
6793 if (TREE_CODE (op1) == MULT_EXPR)
6795 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6797 STRIP_NOPS (arg0);
6798 STRIP_NOPS (arg1);
6800 if (TREE_CODE (arg0) == INTEGER_CST)
6802 s = arg0;
6803 delta = arg1;
6805 else if (TREE_CODE (arg1) == INTEGER_CST)
6807 s = arg1;
6808 delta = arg0;
6810 else
6811 return NULL_TREE;
6813 else if (TREE_CODE (op1) == INTEGER_CST)
6815 delta = op1;
6816 s = NULL_TREE;
6818 else
6820 /* Treat op1 as delta * 1. */
6821 delta = op1;
6822 s = integer_one_node;
6825 /* Handle &x.array the same as we would handle &x.array[0]. */
6826 if (TREE_CODE (ref) == COMPONENT_REF
6827 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6829 tree domain;
6831 /* Remember if this was a multi-dimensional array. */
6832 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6833 mdim = true;
6835 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6836 if (! domain)
6837 goto cont;
6838 itype = TREE_TYPE (domain);
6840 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6841 if (TREE_CODE (step) != INTEGER_CST)
6842 goto cont;
6844 if (s)
6846 if (! tree_int_cst_equal (step, s))
6847 goto cont;
6849 else
6851 /* Check whether delta is a multiple of the step. */
6852 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6853 if (! tmp)
6854 goto cont;
6855 delta = tmp;
6858 /* Only fold here if we can verify we do not overflow one
6859 dimension of a multi-dimensional array. */
6860 if (mdim)
6862 tree tmp;
6864 if (!TYPE_MIN_VALUE (domain)
6865 || !TYPE_MAX_VALUE (domain)
6866 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6867 goto cont;
6869 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6870 fold_convert_loc (loc, itype,
6871 TYPE_MIN_VALUE (domain)),
6872 fold_convert_loc (loc, itype, delta));
6873 if (TREE_CODE (tmp) != INTEGER_CST
6874 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6875 goto cont;
6878 /* We found a suitable component reference. */
6880 pref = TREE_OPERAND (addr, 0);
6881 ret = copy_node (pref);
6882 SET_EXPR_LOCATION (ret, loc);
6884 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6885 fold_build2_loc
6886 (loc, PLUS_EXPR, itype,
6887 fold_convert_loc (loc, itype,
6888 TYPE_MIN_VALUE
6889 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6890 fold_convert_loc (loc, itype, delta)),
6891 NULL_TREE, NULL_TREE);
6892 return build_fold_addr_expr_loc (loc, ret);
6895 cont:
6897 for (;; ref = TREE_OPERAND (ref, 0))
6899 if (TREE_CODE (ref) == ARRAY_REF)
6901 tree domain;
6903 /* Remember if this was a multi-dimensional array. */
6904 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6905 mdim = true;
6907 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6908 if (! domain)
6909 continue;
6910 itype = TREE_TYPE (domain);
6912 step = array_ref_element_size (ref);
6913 if (TREE_CODE (step) != INTEGER_CST)
6914 continue;
6916 if (s)
6918 if (! tree_int_cst_equal (step, s))
6919 continue;
6921 else
6923 /* Check whether delta is a multiple of the step. */
6924 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6925 if (! tmp)
6926 continue;
6927 delta = tmp;
6930 /* Only fold here if we can verify we do not overflow one
6931 dimension of a multi-dimensional array. */
6932 if (mdim)
6934 tree tmp;
6936 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6937 || !TYPE_MAX_VALUE (domain)
6938 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6939 continue;
6941 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6942 fold_convert_loc (loc, itype,
6943 TREE_OPERAND (ref, 1)),
6944 fold_convert_loc (loc, itype, delta));
6945 if (!tmp
6946 || TREE_CODE (tmp) != INTEGER_CST
6947 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6948 continue;
6951 break;
6953 else
6954 mdim = false;
6956 if (!handled_component_p (ref))
6957 return NULL_TREE;
6960 /* We found the suitable array reference. So copy everything up to it,
6961 and replace the index. */
6963 pref = TREE_OPERAND (addr, 0);
6964 ret = copy_node (pref);
6965 SET_EXPR_LOCATION (ret, loc);
6966 pos = ret;
6968 while (pref != ref)
6970 pref = TREE_OPERAND (pref, 0);
6971 TREE_OPERAND (pos, 0) = copy_node (pref);
6972 pos = TREE_OPERAND (pos, 0);
6975 TREE_OPERAND (pos, 1)
6976 = fold_build2_loc (loc, PLUS_EXPR, itype,
6977 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6978 fold_convert_loc (loc, itype, delta));
6979 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
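/* Editorial sketch (not part of GCC) of the index move above: when
   the scale equals the element size, an offset from &a[i] lands on
   &a[i + delta], so the multiplication folds into the index.  */
static int *
demo_move_mult_to_index (int *a, int i, int delta)
{
  /* Same address as &a[i + delta], bounds permitting.  */
  return (int *) ((char *) (a + i) + (long) delta * (long) sizeof (int));
}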
6983 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6984 means A >= Y && A != MAX, but in this case we know that
6985 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6987 static tree
6988 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6990 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6992 if (TREE_CODE (bound) == LT_EXPR)
6993 a = TREE_OPERAND (bound, 0);
6994 else if (TREE_CODE (bound) == GT_EXPR)
6995 a = TREE_OPERAND (bound, 1);
6996 else
6997 return NULL_TREE;
6999 typea = TREE_TYPE (a);
7000 if (!INTEGRAL_TYPE_P (typea)
7001 && !POINTER_TYPE_P (typea))
7002 return NULL_TREE;
7004 if (TREE_CODE (ineq) == LT_EXPR)
7006 a1 = TREE_OPERAND (ineq, 1);
7007 y = TREE_OPERAND (ineq, 0);
7009 else if (TREE_CODE (ineq) == GT_EXPR)
7011 a1 = TREE_OPERAND (ineq, 0);
7012 y = TREE_OPERAND (ineq, 1);
7014 else
7015 return NULL_TREE;
7017 if (TREE_TYPE (a1) != typea)
7018 return NULL_TREE;
7020 if (POINTER_TYPE_P (typea))
7022 /* Convert the pointer types into integers before taking the difference. */
7023 tree ta = fold_convert_loc (loc, ssizetype, a);
7024 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7025 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7027 else
7028 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7030 if (!diff || !integer_onep (diff))
7031 return NULL_TREE;
7033 return fold_build2_loc (loc, GE_EXPR, type, a, y);
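/* Editorial sketch (not part of GCC) of the fold above: under the
   bound a < x, a cannot be the maximum value of its type, so the
   non-sharp test a + 1 > y can be replaced by a >= y.  */
static int
demo_nonsharp_ineq (unsigned a, unsigned x, unsigned y)
{
  return a < x ? ((a + 1 > y) == (a >= y)) : 1;   /* always 1 */
}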
7036 /* Fold a sum or difference of at least one multiplication.
7037 Returns the folded tree or NULL if no simplification could be made. */
7039 static tree
7040 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7041 tree arg0, tree arg1)
7043 tree arg00, arg01, arg10, arg11;
7044 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7046 /* (A * C) +- (B * C) -> (A+-B) * C.
7047 (A * C) +- A -> A * (C+-1).
7048 We are most concerned about the case where C is a constant,
7049 but other combinations show up during loop reduction. Since
7050 it is not difficult, try all four possibilities. */
7052 if (TREE_CODE (arg0) == MULT_EXPR)
7054 arg00 = TREE_OPERAND (arg0, 0);
7055 arg01 = TREE_OPERAND (arg0, 1);
7057 else if (TREE_CODE (arg0) == INTEGER_CST)
7059 arg00 = build_one_cst (type);
7060 arg01 = arg0;
7062 else
7064 /* We cannot generate constant 1 for fract. */
7065 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7066 return NULL_TREE;
7067 arg00 = arg0;
7068 arg01 = build_one_cst (type);
7070 if (TREE_CODE (arg1) == MULT_EXPR)
7072 arg10 = TREE_OPERAND (arg1, 0);
7073 arg11 = TREE_OPERAND (arg1, 1);
7075 else if (TREE_CODE (arg1) == INTEGER_CST)
7077 arg10 = build_one_cst (type);
7078 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7079 the purpose of this canonicalization. */
7080 if (TREE_INT_CST_HIGH (arg1) == -1
7081 && negate_expr_p (arg1)
7082 && code == PLUS_EXPR)
7084 arg11 = negate_expr (arg1);
7085 code = MINUS_EXPR;
7087 else
7088 arg11 = arg1;
7090 else
7092 /* We cannot generate constant 1 for fract. */
7093 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7094 return NULL_TREE;
7095 arg10 = arg1;
7096 arg11 = build_one_cst (type);
7098 same = NULL_TREE;
7100 if (operand_equal_p (arg01, arg11, 0))
7101 same = arg01, alt0 = arg00, alt1 = arg10;
7102 else if (operand_equal_p (arg00, arg10, 0))
7103 same = arg00, alt0 = arg01, alt1 = arg11;
7104 else if (operand_equal_p (arg00, arg11, 0))
7105 same = arg00, alt0 = arg01, alt1 = arg10;
7106 else if (operand_equal_p (arg01, arg10, 0))
7107 same = arg01, alt0 = arg00, alt1 = arg11;
7109 /* No identical multiplicands; see if we can find a common
7110 power-of-two factor in non-power-of-two multiplies. This
7111 can help in multi-dimensional array access. */
7112 else if (host_integerp (arg01, 0)
7113 && host_integerp (arg11, 0))
7115 HOST_WIDE_INT int01, int11, tmp;
7116 bool swap = false;
7117 tree maybe_same;
7118 int01 = TREE_INT_CST_LOW (arg01);
7119 int11 = TREE_INT_CST_LOW (arg11);
7121 /* Move min of absolute values to int11. */
7122 if (absu_hwi (int01) < absu_hwi (int11))
7124 tmp = int01, int01 = int11, int11 = tmp;
7125 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7126 maybe_same = arg01;
7127 swap = true;
7129 else
7130 maybe_same = arg11;
7132 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7133 /* The remainder should not be a constant, otherwise we
7134 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7135 increase the number of multiplications needed. */
7136 && TREE_CODE (arg10) != INTEGER_CST)
7138 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7139 build_int_cst (TREE_TYPE (arg00),
7140 int01 / int11));
7141 alt1 = arg10;
7142 same = maybe_same;
7143 if (swap)
7144 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7148 if (same)
7149 return fold_build2_loc (loc, MULT_EXPR, type,
7150 fold_build2_loc (loc, code, type,
7151 fold_convert_loc (loc, type, alt0),
7152 fold_convert_loc (loc, type, alt1)),
7153 fold_convert_loc (loc, type, same));
7155 return NULL_TREE;
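/* Editorial sketch (not part of GCC) of the factorings above:
   identical multiplicands factor out directly, and a shared
   power-of-two factor is recovered from mixed multiplies.  */
static int
demo_plusminus_mult (int a, int b, int c, int i, int j)
{
  return (a * c + b * c) == (a + b) * c           /* (A*C) + (B*C) -> (A+B)*C */
         && (i * 12 + j * 4) == (i * 3 + j) * 4;  /* common factor 4          */
}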
7158 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7159 specified by EXPR into the buffer PTR of length LEN bytes.
7160 Return the number of bytes placed in the buffer, or zero
7161 upon failure. */
7163 static int
7164 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7166 tree type = TREE_TYPE (expr);
7167 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7168 int byte, offset, word, words;
7169 unsigned char value;
7171 if (total_bytes > len)
7172 return 0;
7173 words = total_bytes / UNITS_PER_WORD;
7175 for (byte = 0; byte < total_bytes; byte++)
7177 int bitpos = byte * BITS_PER_UNIT;
7178 if (bitpos < HOST_BITS_PER_WIDE_INT)
7179 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7180 else
7181 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7182 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7184 if (total_bytes > UNITS_PER_WORD)
7186 word = byte / UNITS_PER_WORD;
7187 if (WORDS_BIG_ENDIAN)
7188 word = (words - 1) - word;
7189 offset = word * UNITS_PER_WORD;
7190 if (BYTES_BIG_ENDIAN)
7191 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7192 else
7193 offset += byte % UNITS_PER_WORD;
7195 else
7196 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7197 ptr[offset] = value;
7199 return total_bytes;
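/* Editorial sketch (not part of GCC) of the encoding loop above,
   specialized to a 4-byte value on a little-endian host with words
   no smaller than 4 bytes: output byte N takes bits [8N, 8N+7].  */
static void
demo_encode_u32_le (unsigned long value, unsigned char *ptr)
{
  int byte;
  for (byte = 0; byte < 4; byte++)
    ptr[byte] = (unsigned char) (value >> (byte * 8));
}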
7203 /* Subroutine of native_encode_expr. Encode the REAL_CST
7204 specified by EXPR into the buffer PTR of length LEN bytes.
7205 Return the number of bytes placed in the buffer, or zero
7206 upon failure. */
7208 static int
7209 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7211 tree type = TREE_TYPE (expr);
7212 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7213 int byte, offset, word, words, bitpos;
7214 unsigned char value;
7216 /* There are always 32 bits in each long, no matter the size of
7217 the host's long. We handle floating point representations with
7218 up to 192 bits. */
7219 long tmp[6];
7221 if (total_bytes > len)
7222 return 0;
7223 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7225 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7227 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7228 bitpos += BITS_PER_UNIT)
7230 byte = (bitpos / BITS_PER_UNIT) & 3;
7231 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7233 if (UNITS_PER_WORD < 4)
7235 word = byte / UNITS_PER_WORD;
7236 if (WORDS_BIG_ENDIAN)
7237 word = (words - 1) - word;
7238 offset = word * UNITS_PER_WORD;
7239 if (BYTES_BIG_ENDIAN)
7240 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7241 else
7242 offset += byte % UNITS_PER_WORD;
7244 else
7245 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7246 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7248 return total_bytes;
7251 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7252 specified by EXPR into the buffer PTR of length LEN bytes.
7253 Return the number of bytes placed in the buffer, or zero
7254 upon failure. */
7256 static int
7257 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7259 int rsize, isize;
7260 tree part;
7262 part = TREE_REALPART (expr);
7263 rsize = native_encode_expr (part, ptr, len);
7264 if (rsize == 0)
7265 return 0;
7266 part = TREE_IMAGPART (expr);
7267 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7268 if (isize != rsize)
7269 return 0;
7270 return rsize + isize;
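/* Resulting layout (illustrative): for a complex double on a typical
   64-bit target the real part occupies ptr[0..7] and the imaginary
   part ptr[8..15]; the encoding is rejected unless both halves
   encode to the same size. */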
7274 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7275 specified by EXPR into the buffer PTR of length LEN bytes.
7276 Return the number of bytes placed in the buffer, or zero
7277 upon failure. */
7279 static int
7280 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7282 unsigned i, count;
7283 int size, offset;
7284 tree itype, elem;
7286 offset = 0;
7287 count = VECTOR_CST_NELTS (expr);
7288 itype = TREE_TYPE (TREE_TYPE (expr));
7289 size = GET_MODE_SIZE (TYPE_MODE (itype));
7290 for (i = 0; i < count; i++)
7292 elem = VECTOR_CST_ELT (expr, i);
7293 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7294 return 0;
7295 offset += size;
7297 return offset;
7301 /* Subroutine of native_encode_expr. Encode the STRING_CST
7302 specified by EXPR into the buffer PTR of length LEN bytes.
7303 Return the number of bytes placed in the buffer, or zero
7304 upon failure. */
7306 static int
7307 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7309 tree type = TREE_TYPE (expr);
7310 HOST_WIDE_INT total_bytes;
7312 if (TREE_CODE (type) != ARRAY_TYPE
7313 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7314 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7315 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7316 return 0;
7317 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7318 if (total_bytes > len)
7319 return 0;
7320 if (TREE_STRING_LENGTH (expr) < total_bytes)
7322 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7323 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7324 total_bytes - TREE_STRING_LENGTH (expr));
7326 else
7327 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7328 return total_bytes;
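/* Worked example (illustrative): encoding the STRING_CST "hi" of
   type char[8] copies the three bytes 'h', 'i', '\0' and zero-fills
   the remaining five, matching the in-memory representation of the
   padded array. */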
7332 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7333 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7334 buffer PTR of length LEN bytes. Return the number of bytes
7335 placed in the buffer, or zero upon failure. */
7337 int
7338 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7340 switch (TREE_CODE (expr))
7342 case INTEGER_CST:
7343 return native_encode_int (expr, ptr, len);
7345 case REAL_CST:
7346 return native_encode_real (expr, ptr, len);
7348 case COMPLEX_CST:
7349 return native_encode_complex (expr, ptr, len);
7351 case VECTOR_CST:
7352 return native_encode_vector (expr, ptr, len);
7354 case STRING_CST:
7355 return native_encode_string (expr, ptr, len);
7357 default:
7358 return 0;
7363 /* Subroutine of native_interpret_expr. Interpret the contents of
7364 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7365 If the buffer cannot be interpreted, return NULL_TREE. */
7367 static tree
7368 native_interpret_int (tree type, const unsigned char *ptr, int len)
7370 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7371 int byte, offset, word, words;
7372 unsigned char value;
7373 double_int result;
7375 if (total_bytes > len)
7376 return NULL_TREE;
7377 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7378 return NULL_TREE;
7380 result = double_int_zero;
7381 words = total_bytes / UNITS_PER_WORD;
7383 for (byte = 0; byte < total_bytes; byte++)
7385 int bitpos = byte * BITS_PER_UNIT;
7386 if (total_bytes > UNITS_PER_WORD)
7388 word = byte / UNITS_PER_WORD;
7389 if (WORDS_BIG_ENDIAN)
7390 word = (words - 1) - word;
7391 offset = word * UNITS_PER_WORD;
7392 if (BYTES_BIG_ENDIAN)
7393 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7394 else
7395 offset += byte % UNITS_PER_WORD;
7397 else
7398 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7399 value = ptr[offset];
7401 if (bitpos < HOST_BITS_PER_WIDE_INT)
7402 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7403 else
7404 result.high |= (unsigned HOST_WIDE_INT) value
7405 << (bitpos - HOST_BITS_PER_WIDE_INT);
7408 return double_int_to_tree (type, result);
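/* Worked example (illustrative): interpreting the bytes
   01 02 03 04 as a 32-bit int on a little-endian target assembles
   result.low = 0x04030201; double_int_to_tree then truncates or
   sign-extends the value to the precision of TYPE. */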
7412 /* Subroutine of native_interpret_expr. Interpret the contents of
7413 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7414 If the buffer cannot be interpreted, return NULL_TREE. */
7416 static tree
7417 native_interpret_real (tree type, const unsigned char *ptr, int len)
7419 enum machine_mode mode = TYPE_MODE (type);
7420 int total_bytes = GET_MODE_SIZE (mode);
7421 int byte, offset, word, words, bitpos;
7422 unsigned char value;
7423 /* There are always 32 bits in each long, no matter the size of
7424 the host's long. We handle floating point representations with
7425 up to 192 bits. */
7426 REAL_VALUE_TYPE r;
7427 long tmp[6];
7429 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7430 if (total_bytes > len || total_bytes > 24)
7431 return NULL_TREE;
7432 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7434 memset (tmp, 0, sizeof (tmp));
7435 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7436 bitpos += BITS_PER_UNIT)
7438 byte = (bitpos / BITS_PER_UNIT) & 3;
7439 if (UNITS_PER_WORD < 4)
7441 word = byte / UNITS_PER_WORD;
7442 if (WORDS_BIG_ENDIAN)
7443 word = (words - 1) - word;
7444 offset = word * UNITS_PER_WORD;
7445 if (BYTES_BIG_ENDIAN)
7446 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7447 else
7448 offset += byte % UNITS_PER_WORD;
7450 else
7451 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7452 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7454 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7457 real_from_target (&r, tmp, mode);
7458 return build_real (type, r);
7462 /* Subroutine of native_interpret_expr. Interpret the contents of
7463 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7464 If the buffer cannot be interpreted, return NULL_TREE. */
7466 static tree
7467 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7469 tree etype, rpart, ipart;
7470 int size;
7472 etype = TREE_TYPE (type);
7473 size = GET_MODE_SIZE (TYPE_MODE (etype));
7474 if (size * 2 > len)
7475 return NULL_TREE;
7476 rpart = native_interpret_expr (etype, ptr, size);
7477 if (!rpart)
7478 return NULL_TREE;
7479 ipart = native_interpret_expr (etype, ptr+size, size);
7480 if (!ipart)
7481 return NULL_TREE;
7482 return build_complex (type, rpart, ipart);
7486 /* Subroutine of native_interpret_expr. Interpret the contents of
7487 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7488 If the buffer cannot be interpreted, return NULL_TREE. */
7490 static tree
7491 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7493 tree etype, elem;
7494 int i, size, count;
7495 tree *elements;
7497 etype = TREE_TYPE (type);
7498 size = GET_MODE_SIZE (TYPE_MODE (etype));
7499 count = TYPE_VECTOR_SUBPARTS (type);
7500 if (size * count > len)
7501 return NULL_TREE;
7503 elements = XALLOCAVEC (tree, count);
7504 for (i = count - 1; i >= 0; i--)
7506 elem = native_interpret_expr (etype, ptr+(i*size), size);
7507 if (!elem)
7508 return NULL_TREE;
7509 elements[i] = elem;
7511 return build_vector (type, elements);
7515 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7516 the buffer PTR of length LEN as a constant of type TYPE. For
7517 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7518 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7519 return NULL_TREE. */
7521 tree
7522 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7524 switch (TREE_CODE (type))
7526 case INTEGER_TYPE:
7527 case ENUMERAL_TYPE:
7528 case BOOLEAN_TYPE:
7529 case POINTER_TYPE:
7530 case REFERENCE_TYPE:
7531 return native_interpret_int (type, ptr, len);
7533 case REAL_TYPE:
7534 return native_interpret_real (type, ptr, len);
7536 case COMPLEX_TYPE:
7537 return native_interpret_complex (type, ptr, len);
7539 case VECTOR_TYPE:
7540 return native_interpret_vector (type, ptr, len);
7542 default:
7543 return NULL_TREE;
7547 /* Returns true if we can interpret the contents of a native encoding
7548 as TYPE. */
7550 static bool
7551 can_native_interpret_type_p (tree type)
7553 switch (TREE_CODE (type))
7555 case INTEGER_TYPE:
7556 case ENUMERAL_TYPE:
7557 case BOOLEAN_TYPE:
7558 case POINTER_TYPE:
7559 case REFERENCE_TYPE:
7560 case REAL_TYPE:
7561 case COMPLEX_TYPE:
7562 case VECTOR_TYPE:
7563 return true;
7564 default:
7565 return false;
7569 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7570 TYPE at compile-time. If we're unable to perform the conversion,
7571 return NULL_TREE. */
7573 static tree
7574 fold_view_convert_expr (tree type, tree expr)
7576 /* We support up to 512-bit values (for V8DFmode). */
7577 unsigned char buffer[64];
7578 int len;
7580 /* Check that the host and target are sane. */
7581 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7582 return NULL_TREE;
7584 len = native_encode_expr (expr, buffer, sizeof (buffer));
7585 if (len == 0)
7586 return NULL_TREE;
7588 return native_interpret_expr (type, buffer, len);
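/* Usage sketch (illustrative, hypothetical caller):

     unsigned char buf[64];
     int n = native_encode_expr (expr, buf, sizeof buf);
     tree folded = n ? native_interpret_expr (type, buf, n) : NULL_TREE;

   which is exactly the round trip performed above. Assuming IEEE
   single precision, VIEW_CONVERT_EXPR<int>(1.0f) folds this way to
   the INTEGER_CST 0x3f800000 (1065353216), independent of the
   host's endianness, since both steps honor the target's byte
   order. */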
7591 /* Build an expression for the address of T. Folds away INDIRECT_REF
7592 to avoid confusing the gimplify process. */
7594 tree
7595 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7597 /* The size of the object is not relevant when talking about its address. */
7598 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7599 t = TREE_OPERAND (t, 0);
7601 if (TREE_CODE (t) == INDIRECT_REF)
7603 t = TREE_OPERAND (t, 0);
7605 if (TREE_TYPE (t) != ptrtype)
7606 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7608 else if (TREE_CODE (t) == MEM_REF
7609 && integer_zerop (TREE_OPERAND (t, 1)))
7610 return TREE_OPERAND (t, 0);
7611 else if (TREE_CODE (t) == MEM_REF
7612 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7613 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7614 TREE_OPERAND (t, 0),
7615 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7616 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7618 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7620 if (TREE_TYPE (t) != ptrtype)
7621 t = fold_convert_loc (loc, ptrtype, t);
7623 else
7624 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7626 return t;
7629 /* Build an expression for the address of T. */
7631 tree
7632 build_fold_addr_expr_loc (location_t loc, tree t)
7634 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7636 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
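/* Illustrative folds performed by the two functions above (a
   sketch, not exhaustive): &*p becomes p, &MEM_REF[p, 0] becomes p,
   and &MEM_REF[cst, off] becomes the constant cst p+ off, in each
   case avoiding a gratuitous ADDR_EXPR that would confuse
   gimplification. */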
7639 static bool vec_cst_ctor_to_array (tree, tree *);
7641 /* Fold a unary expression of code CODE and type TYPE with operand
7642 OP0. Return the folded expression if folding is successful.
7643 Otherwise, return NULL_TREE. */
7645 tree
7646 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7648 tree tem;
7649 tree arg0;
7650 enum tree_code_class kind = TREE_CODE_CLASS (code);
7652 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7653 && TREE_CODE_LENGTH (code) == 1);
7655 arg0 = op0;
7656 if (arg0)
7658 if (CONVERT_EXPR_CODE_P (code)
7659 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7661 /* Don't use STRIP_NOPS, because signedness of argument type
7662 matters. */
7663 STRIP_SIGN_NOPS (arg0);
7665 else
7667 /* Strip any conversions that don't change the mode. This
7668 is safe for every expression, except for a comparison
7669 expression because its signedness is derived from its
7670 operands.
7672 Note that this is done as an internal manipulation within
7673 the constant folder, in order to find the simplest
7674 representation of the arguments so that their form can be
7675 studied. In any case, the appropriate type conversions
7676 should be put back in the tree that will get out of the
7677 constant folder. */
7678 STRIP_NOPS (arg0);
7682 if (TREE_CODE_CLASS (code) == tcc_unary)
7684 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7685 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7686 fold_build1_loc (loc, code, type,
7687 fold_convert_loc (loc, TREE_TYPE (op0),
7688 TREE_OPERAND (arg0, 1))));
7689 else if (TREE_CODE (arg0) == COND_EXPR)
7691 tree arg01 = TREE_OPERAND (arg0, 1);
7692 tree arg02 = TREE_OPERAND (arg0, 2);
7693 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7694 arg01 = fold_build1_loc (loc, code, type,
7695 fold_convert_loc (loc,
7696 TREE_TYPE (op0), arg01));
7697 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7698 arg02 = fold_build1_loc (loc, code, type,
7699 fold_convert_loc (loc,
7700 TREE_TYPE (op0), arg02));
7701 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7702 arg01, arg02);
7704 /* If this was a conversion, and all we did was move it
7705 inside the COND_EXPR, bring it back out. But leave it if
7706 it is a conversion from integer to integer and the
7707 result precision is no wider than a word since such a
7708 conversion is cheap and may be optimized away by combine,
7709 while it couldn't if it were outside the COND_EXPR. Then return
7710 so we don't get into an infinite recursion loop taking the
7711 conversion out and then back in. */
7713 if ((CONVERT_EXPR_CODE_P (code)
7714 || code == NON_LVALUE_EXPR)
7715 && TREE_CODE (tem) == COND_EXPR
7716 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7717 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7718 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7719 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7720 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7721 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7722 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7723 && (INTEGRAL_TYPE_P
7724 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7725 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7726 || flag_syntax_only))
7727 tem = build1_loc (loc, code, type,
7728 build3 (COND_EXPR,
7729 TREE_TYPE (TREE_OPERAND
7730 (TREE_OPERAND (tem, 1), 0)),
7731 TREE_OPERAND (tem, 0),
7732 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7733 TREE_OPERAND (TREE_OPERAND (tem, 2),
7734 0)));
7735 return tem;
7739 switch (code)
7741 case PAREN_EXPR:
7742 /* Re-association barriers around constants and other re-association
7743 barriers can be removed. */
7744 if (CONSTANT_CLASS_P (op0)
7745 || TREE_CODE (op0) == PAREN_EXPR)
7746 return fold_convert_loc (loc, type, op0);
7747 return NULL_TREE;
7749 CASE_CONVERT:
7750 case FLOAT_EXPR:
7751 case FIX_TRUNC_EXPR:
7752 if (TREE_TYPE (op0) == type)
7753 return op0;
7755 if (COMPARISON_CLASS_P (op0))
7757 /* If we have (type) (a CMP b) and type is an integral type, return
7758 new expression involving the new type. Canonicalize
7759 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7760 non-integral type.
7761 Do not fold the result as that would not simplify further, also
7762 folding again results in recursions. */
7763 if (TREE_CODE (type) == BOOLEAN_TYPE)
7764 return build2_loc (loc, TREE_CODE (op0), type,
7765 TREE_OPERAND (op0, 0),
7766 TREE_OPERAND (op0, 1));
7767 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7768 && TREE_CODE (type) != VECTOR_TYPE)
7769 return build3_loc (loc, COND_EXPR, type, op0,
7770 constant_boolean_node (true, type),
7771 constant_boolean_node (false, type));
7774 /* Handle cases of two conversions in a row. */
7775 if (CONVERT_EXPR_P (op0))
7777 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7778 tree inter_type = TREE_TYPE (op0);
7779 int inside_int = INTEGRAL_TYPE_P (inside_type);
7780 int inside_ptr = POINTER_TYPE_P (inside_type);
7781 int inside_float = FLOAT_TYPE_P (inside_type);
7782 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7783 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7784 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7785 int inter_int = INTEGRAL_TYPE_P (inter_type);
7786 int inter_ptr = POINTER_TYPE_P (inter_type);
7787 int inter_float = FLOAT_TYPE_P (inter_type);
7788 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7789 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7790 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7791 int final_int = INTEGRAL_TYPE_P (type);
7792 int final_ptr = POINTER_TYPE_P (type);
7793 int final_float = FLOAT_TYPE_P (type);
7794 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7795 unsigned int final_prec = TYPE_PRECISION (type);
7796 int final_unsignedp = TYPE_UNSIGNED (type);
7798 /* Check for cases specific to UPC, involving pointer types. */
7799 if (final_ptr || inter_ptr || inside_ptr)
7801 int final_pts = final_ptr
7802 && upc_shared_type_p (TREE_TYPE (type));
7803 int inter_pts = inter_ptr
7804 && upc_shared_type_p (TREE_TYPE (inter_type));
7805 int inside_pts = inside_ptr
7806 && upc_shared_type_p (TREE_TYPE (inside_type));
7807 if (final_pts || inter_pts || inside_pts)
7809 if (!(((final_pts && inter_pts)
7810 && TREE_TYPE (type) == TREE_TYPE (inter_type))
7811 || ((inter_pts && inside_pts)
7812 && (TREE_TYPE (inter_type)
7813 == TREE_TYPE (inside_type)))))
7814 return NULL_TREE;
7818 /* In addition to the cases of two conversions in a row
7819 handled below, if we are converting something to its own
7820 type via an object of identical or wider precision, neither
7821 conversion is needed. */
7822 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7823 && (((inter_int || inter_ptr) && final_int)
7824 || (inter_float && final_float))
7825 && inter_prec >= final_prec)
7826 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7828 /* Likewise, if the intermediate and initial types are either both
7829 float or both integer, we don't need the middle conversion if the
7830 former is wider than the latter and doesn't change the signedness
7831 (for integers). Avoid this if the final type is a pointer since
7832 then we sometimes need the middle conversion. Likewise if the
7833 final type has a precision not equal to the size of its mode. */
7834 if (((inter_int && inside_int)
7835 || (inter_float && inside_float)
7836 || (inter_vec && inside_vec))
7837 && inter_prec >= inside_prec
7838 && (inter_float || inter_vec
7839 || inter_unsignedp == inside_unsignedp)
7840 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7841 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7842 && ! final_ptr
7843 && (! final_vec || inter_prec == inside_prec))
7844 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7846 /* If we have a sign-extension of a zero-extended value, we can
7847 replace that by a single zero-extension. Likewise if the
7848 final conversion does not change precision we can drop the
7849 intermediate conversion. */
7850 if (inside_int && inter_int && final_int
7851 && ((inside_prec < inter_prec && inter_prec < final_prec
7852 && inside_unsignedp && !inter_unsignedp)
7853 || final_prec == inter_prec))
7854 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7856 /* Two conversions in a row are not needed unless:
7857 - some conversion is floating-point (overstrict for now), or
7858 - some conversion is a vector (overstrict for now), or
7859 - the intermediate type is narrower than both initial and
7860 final, or
7861 - the intermediate type and innermost type differ in signedness,
7862 and the outermost type is wider than the intermediate, or
7863 - the initial type is a pointer type and the precisions of the
7864 intermediate and final types differ, or
7865 - the final type is a pointer type and the precisions of the
7866 initial and intermediate types differ. */
7867 if (! inside_float && ! inter_float && ! final_float
7868 && ! inside_vec && ! inter_vec && ! final_vec
7869 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7870 && ! (inside_int && inter_int
7871 && inter_unsignedp != inside_unsignedp
7872 && inter_prec < final_prec)
7873 && ((inter_unsignedp && inter_prec > inside_prec)
7874 == (final_unsignedp && final_prec > inter_prec))
7875 && ! (inside_ptr && inter_prec != final_prec)
7876 && ! (final_ptr && inside_prec != inter_prec)
7877 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7878 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7879 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7882 /* Handle (T *)&A.B.C for A being of type T and B and C
7883 living at offset zero. This occurs frequently in
7884 C++ upcasting and then accessing the base. */
7885 if (TREE_CODE (op0) == ADDR_EXPR
7886 && POINTER_TYPE_P (type)
7887 && handled_component_p (TREE_OPERAND (op0, 0)))
7889 HOST_WIDE_INT bitsize, bitpos;
7890 tree offset;
7891 enum machine_mode mode;
7892 int unsignedp, volatilep;
7893 tree base = TREE_OPERAND (op0, 0);
7894 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7895 &mode, &unsignedp, &volatilep, false);
7896 /* If the reference was to a (constant) zero offset, we can use
7897 the address of the base if it has the same base type
7898 as the result type and the pointer type is unqualified. */
7899 if (! offset && bitpos == 0
7900 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7901 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7902 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7903 return fold_convert_loc (loc, type,
7904 build_fold_addr_expr_loc (loc, base));
7907 if (TREE_CODE (op0) == MODIFY_EXPR
7908 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7909 /* Detect assigning a bitfield. */
7910 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7911 && DECL_BIT_FIELD
7912 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7914 /* Don't leave an assignment inside a conversion
7915 unless assigning a bitfield. */
7916 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7917 /* First do the assignment, then return converted constant. */
7918 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7919 TREE_NO_WARNING (tem) = 1;
7920 TREE_USED (tem) = 1;
7921 return tem;
7924 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7925 constants (if x has signed type, the sign bit cannot be set
7926 in c). This folds extension into the BIT_AND_EXPR.
7927 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7928 very likely don't have maximal range for their precision and this
7929 transformation effectively doesn't preserve non-maximal ranges. */
7930 if (TREE_CODE (type) == INTEGER_TYPE
7931 && TREE_CODE (op0) == BIT_AND_EXPR
7932 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7934 tree and_expr = op0;
7935 tree and0 = TREE_OPERAND (and_expr, 0);
7936 tree and1 = TREE_OPERAND (and_expr, 1);
7937 int change = 0;
7939 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7940 || (TYPE_PRECISION (type)
7941 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7942 change = 1;
7943 else if (TYPE_PRECISION (TREE_TYPE (and1))
7944 <= HOST_BITS_PER_WIDE_INT
7945 && host_integerp (and1, 1))
7947 unsigned HOST_WIDE_INT cst;
7949 cst = tree_low_cst (and1, 1);
7950 cst &= (HOST_WIDE_INT) -1
7951 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7952 change = (cst == 0);
7953 #ifdef LOAD_EXTEND_OP
7954 if (change
7955 && !flag_syntax_only
7956 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7957 == ZERO_EXTEND))
7959 tree uns = unsigned_type_for (TREE_TYPE (and0));
7960 and0 = fold_convert_loc (loc, uns, and0);
7961 and1 = fold_convert_loc (loc, uns, and1);
7963 #endif
7965 if (change)
7967 tem = force_fit_type_double (type, tree_to_double_int (and1),
7968 0, TREE_OVERFLOW (and1));
7969 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7970 fold_convert_loc (loc, type, and0), tem);
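/* Example of the transformation above (illustrative): if s has type
   unsigned short, (unsigned int) (s & 0x7fff) becomes
   (unsigned int) s & 0x7fff, folding the widening into the mask. */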
7974 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7975 when one of the new casts will fold away. Conservatively we assume
7976 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7977 if (POINTER_TYPE_P (type)
7978 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7979 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7980 && !upc_shared_type_p (TREE_TYPE (type))
7981 && !upc_shared_type_p (TREE_TYPE (
7982 TREE_TYPE (TREE_OPERAND (arg0, 0))))
7983 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7984 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7985 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7987 tree arg00 = TREE_OPERAND (arg0, 0);
7988 tree arg01 = TREE_OPERAND (arg0, 1);
7990 return fold_build_pointer_plus_loc
7991 (loc, fold_convert_loc (loc, type, arg00), arg01);
7994 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7995 of the same precision, and X is an integer type not narrower than
7996 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7997 if (INTEGRAL_TYPE_P (type)
7998 && TREE_CODE (op0) == BIT_NOT_EXPR
7999 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8000 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8001 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8003 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8004 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8005 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8006 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8007 fold_convert_loc (loc, type, tem));
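/* Example (illustrative): for int x, (int) ~(unsigned int) x folds
   to ~x, since the two casts cancel and int and unsigned int share
   the same precision. */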
8010 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8011 type of X and Y (integer types only). */
8012 if (INTEGRAL_TYPE_P (type)
8013 && TREE_CODE (op0) == MULT_EXPR
8014 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8015 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8017 /* Be careful not to introduce new overflows. */
8018 tree mult_type;
8019 if (TYPE_OVERFLOW_WRAPS (type))
8020 mult_type = type;
8021 else
8022 mult_type = unsigned_type_for (type);
8024 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8026 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8027 fold_convert_loc (loc, mult_type,
8028 TREE_OPERAND (op0, 0)),
8029 fold_convert_loc (loc, mult_type,
8030 TREE_OPERAND (op0, 1)));
8031 return fold_convert_loc (loc, type, tem);
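/* Example (illustrative): for int x, y, (short) (x * y) is rewritten
   to (short) ((unsigned short) x * (unsigned short) y) when short
   arithmetic does not wrap, so the narrowed multiplication cannot
   introduce a new signed overflow. */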
8035 tem = fold_convert_const (code, type, op0);
8036 return tem ? tem : NULL_TREE;
8038 case ADDR_SPACE_CONVERT_EXPR:
8039 if (integer_zerop (arg0))
8040 return fold_convert_const (code, type, arg0);
8041 return NULL_TREE;
8043 case FIXED_CONVERT_EXPR:
8044 tem = fold_convert_const (code, type, arg0);
8045 return tem ? tem : NULL_TREE;
8047 case VIEW_CONVERT_EXPR:
8048 if (TREE_TYPE (op0) == type)
8049 return op0;
8050 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8051 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8052 type, TREE_OPERAND (op0, 0));
8053 if (TREE_CODE (op0) == MEM_REF)
8054 return fold_build2_loc (loc, MEM_REF, type,
8055 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8057 /* For integral conversions with the same precision or pointer
8058 conversions, use a NOP_EXPR instead. */
8059 if ((INTEGRAL_TYPE_P (type)
8060 || (POINTER_TYPE_P (type)
8061 && !upc_shared_type_p (TREE_TYPE (type))))
8062 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8063 || (POINTER_TYPE_P (TREE_TYPE (op0))
8064 && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
8065 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8066 return fold_convert_loc (loc, type, op0);
8068 /* Strip inner integral conversions that do not change the precision. */
8069 if (CONVERT_EXPR_P (op0)
8070 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8071 || (POINTER_TYPE_P (TREE_TYPE (op0))
8072 && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
8073 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8074 || (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8075 && !upc_shared_type_p (TREE_TYPE (
8076 TREE_TYPE (
8077 TREE_OPERAND (op0, 0))))))
8078 && (TYPE_PRECISION (TREE_TYPE (op0))
8079 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8080 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8081 type, TREE_OPERAND (op0, 0));
8083 return fold_view_convert_expr (type, op0);
8085 case NEGATE_EXPR:
8086 tem = fold_negate_expr (loc, arg0);
8087 if (tem)
8088 return fold_convert_loc (loc, type, tem);
8089 return NULL_TREE;
8091 case ABS_EXPR:
8092 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8093 return fold_abs_const (arg0, type);
8094 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8095 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8096 /* Convert fabs((double)float) into (double)fabsf(float). */
8097 else if (TREE_CODE (arg0) == NOP_EXPR
8098 && TREE_CODE (type) == REAL_TYPE)
8100 tree targ0 = strip_float_extensions (arg0);
8101 if (targ0 != arg0)
8102 return fold_convert_loc (loc, type,
8103 fold_build1_loc (loc, ABS_EXPR,
8104 TREE_TYPE (targ0),
8105 targ0));
8107 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8108 else if (TREE_CODE (arg0) == ABS_EXPR)
8109 return arg0;
8110 else if (tree_expr_nonnegative_p (arg0))
8111 return arg0;
8113 /* Strip sign ops from argument. */
8114 if (TREE_CODE (type) == REAL_TYPE)
8116 tem = fold_strip_sign_ops (arg0);
8117 if (tem)
8118 return fold_build1_loc (loc, ABS_EXPR, type,
8119 fold_convert_loc (loc, type, tem));
8121 return NULL_TREE;
8123 case CONJ_EXPR:
8124 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8125 return fold_convert_loc (loc, type, arg0);
8126 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8128 tree itype = TREE_TYPE (type);
8129 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8130 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8131 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8132 negate_expr (ipart));
8134 if (TREE_CODE (arg0) == COMPLEX_CST)
8136 tree itype = TREE_TYPE (type);
8137 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8138 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8139 return build_complex (type, rpart, negate_expr (ipart));
8141 if (TREE_CODE (arg0) == CONJ_EXPR)
8142 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8143 return NULL_TREE;
8145 case BIT_NOT_EXPR:
8146 if (TREE_CODE (arg0) == INTEGER_CST)
8147 return fold_not_const (arg0, type);
8148 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8149 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8150 /* Convert ~ (-A) to A - 1. */
8151 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8152 return fold_build2_loc (loc, MINUS_EXPR, type,
8153 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8154 build_int_cst (type, 1));
8155 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8156 else if (INTEGRAL_TYPE_P (type)
8157 && ((TREE_CODE (arg0) == MINUS_EXPR
8158 && integer_onep (TREE_OPERAND (arg0, 1)))
8159 || (TREE_CODE (arg0) == PLUS_EXPR
8160 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8161 return fold_build1_loc (loc, NEGATE_EXPR, type,
8162 fold_convert_loc (loc, type,
8163 TREE_OPERAND (arg0, 0)));
8164 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8165 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8166 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8167 fold_convert_loc (loc, type,
8168 TREE_OPERAND (arg0, 0)))))
8169 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8170 fold_convert_loc (loc, type,
8171 TREE_OPERAND (arg0, 1)));
8172 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8173 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8174 fold_convert_loc (loc, type,
8175 TREE_OPERAND (arg0, 1)))))
8176 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8177 fold_convert_loc (loc, type,
8178 TREE_OPERAND (arg0, 0)), tem);
8179 /* Perform BIT_NOT_EXPR on each element individually. */
8180 else if (TREE_CODE (arg0) == VECTOR_CST)
8182 tree *elements;
8183 tree elem;
8184 unsigned count = VECTOR_CST_NELTS (arg0), i;
8186 elements = XALLOCAVEC (tree, count);
8187 for (i = 0; i < count; i++)
8189 elem = VECTOR_CST_ELT (arg0, i);
8190 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8191 if (elem == NULL_TREE)
8192 break;
8193 elements[i] = elem;
8195 if (i == count)
8196 return build_vector (type, elements);
8199 return NULL_TREE;
8201 case TRUTH_NOT_EXPR:
8202 /* The argument to invert_truthvalue must have Boolean type. */
8203 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8204 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8206 /* Note that the operand of this must be an int
8207 and its values must be 0 or 1.
8208 ("true" is a fixed value perhaps depending on the language,
8209 but we don't handle values other than 1 correctly yet.) */
8210 tem = fold_truth_not_expr (loc, arg0);
8211 if (!tem)
8212 return NULL_TREE;
8213 return fold_convert_loc (loc, type, tem);
8215 case REALPART_EXPR:
8216 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8217 return fold_convert_loc (loc, type, arg0);
8218 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8219 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8220 TREE_OPERAND (arg0, 1));
8221 if (TREE_CODE (arg0) == COMPLEX_CST)
8222 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8223 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8225 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8226 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8227 fold_build1_loc (loc, REALPART_EXPR, itype,
8228 TREE_OPERAND (arg0, 0)),
8229 fold_build1_loc (loc, REALPART_EXPR, itype,
8230 TREE_OPERAND (arg0, 1)));
8231 return fold_convert_loc (loc, type, tem);
8233 if (TREE_CODE (arg0) == CONJ_EXPR)
8235 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8236 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8237 TREE_OPERAND (arg0, 0));
8238 return fold_convert_loc (loc, type, tem);
8240 if (TREE_CODE (arg0) == CALL_EXPR)
8242 tree fn = get_callee_fndecl (arg0);
8243 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8244 switch (DECL_FUNCTION_CODE (fn))
8246 CASE_FLT_FN (BUILT_IN_CEXPI):
8247 fn = mathfn_built_in (type, BUILT_IN_COS);
8248 if (fn)
8249 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8250 break;
8252 default:
8253 break;
8256 return NULL_TREE;
8258 case IMAGPART_EXPR:
8259 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8260 return build_zero_cst (type);
8261 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8262 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8263 TREE_OPERAND (arg0, 0));
8264 if (TREE_CODE (arg0) == COMPLEX_CST)
8265 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8266 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8268 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8269 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8270 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8271 TREE_OPERAND (arg0, 0)),
8272 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8273 TREE_OPERAND (arg0, 1)));
8274 return fold_convert_loc (loc, type, tem);
8276 if (TREE_CODE (arg0) == CONJ_EXPR)
8278 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8279 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8280 return fold_convert_loc (loc, type, negate_expr (tem));
8282 if (TREE_CODE (arg0) == CALL_EXPR)
8284 tree fn = get_callee_fndecl (arg0);
8285 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8286 switch (DECL_FUNCTION_CODE (fn))
8288 CASE_FLT_FN (BUILT_IN_CEXPI):
8289 fn = mathfn_built_in (type, BUILT_IN_SIN);
8290 if (fn)
8291 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8292 break;
8294 default:
8295 break;
8298 return NULL_TREE;
8300 case INDIRECT_REF:
8301 /* Fold *&X to X if X is an lvalue. */
8302 if (TREE_CODE (op0) == ADDR_EXPR)
8304 tree op00 = TREE_OPERAND (op0, 0);
8305 if ((TREE_CODE (op00) == VAR_DECL
8306 || TREE_CODE (op00) == PARM_DECL
8307 || TREE_CODE (op00) == RESULT_DECL)
8308 && !TREE_READONLY (op00))
8309 return op00;
8311 return NULL_TREE;
8313 case VEC_UNPACK_LO_EXPR:
8314 case VEC_UNPACK_HI_EXPR:
8315 case VEC_UNPACK_FLOAT_LO_EXPR:
8316 case VEC_UNPACK_FLOAT_HI_EXPR:
8318 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8319 tree *elts;
8320 enum tree_code subcode;
8322 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8323 if (TREE_CODE (arg0) != VECTOR_CST)
8324 return NULL_TREE;
8326 elts = XALLOCAVEC (tree, nelts * 2);
8327 if (!vec_cst_ctor_to_array (arg0, elts))
8328 return NULL_TREE;
8330 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8331 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8332 elts += nelts;
8334 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8335 subcode = NOP_EXPR;
8336 else
8337 subcode = FLOAT_EXPR;
8339 for (i = 0; i < nelts; i++)
8341 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8342 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8343 return NULL_TREE;
8346 return build_vector (type, elts);
8349 case REDUC_MIN_EXPR:
8350 case REDUC_MAX_EXPR:
8351 case REDUC_PLUS_EXPR:
8353 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8354 tree *elts;
8355 enum tree_code subcode;
8357 if (TREE_CODE (op0) != VECTOR_CST)
8358 return NULL_TREE;
8360 elts = XALLOCAVEC (tree, nelts);
8361 if (!vec_cst_ctor_to_array (op0, elts))
8362 return NULL_TREE;
8364 switch (code)
8366 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8367 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8368 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8369 default: gcc_unreachable ();
8372 for (i = 1; i < nelts; i++)
8374 elts[0] = const_binop (subcode, elts[0], elts[i]);
8375 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8376 return NULL_TREE;
8377 elts[i] = build_zero_cst (TREE_TYPE (type));
8380 return build_vector (type, elts);
8383 default:
8384 return NULL_TREE;
8385 } /* switch (code) */
8389 /* If the operation was a conversion, do _not_ mark a resulting constant
8390 with TREE_OVERFLOW if the original constant was not. These conversions
8391 have implementation defined behavior and retaining the TREE_OVERFLOW
8392 flag here would confuse later passes such as VRP. */
8393 tree
8394 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8395 tree type, tree op0)
8397 tree res = fold_unary_loc (loc, code, type, op0);
8398 if (res
8399 && TREE_CODE (res) == INTEGER_CST
8400 && TREE_CODE (op0) == INTEGER_CST
8401 && CONVERT_EXPR_CODE_P (code))
8402 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8404 return res;
8407 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8408 operands OP0 and OP1. LOC is the location of the resulting expression.
8409 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8410 Return the folded expression if folding is successful. Otherwise,
8411 return NULL_TREE. */
8412 static tree
8413 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8414 tree arg0, tree arg1, tree op0, tree op1)
8416 tree tem;
8418 /* We only do these simplifications if we are optimizing. */
8419 if (!optimize)
8420 return NULL_TREE;
8422 /* Check for things like (A || B) && (A || C). We can convert this
8423 to A || (B && C). Note that either operator can be any of the four
8424 truth and/or operations and the transformation will still be
8425 valid. Also note that we only care about order for the
8426 ANDIF and ORIF operators. If B contains side effects, this
8427 might change the truth-value of A. */
8428 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8429 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8430 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8431 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8432 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8433 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8435 tree a00 = TREE_OPERAND (arg0, 0);
8436 tree a01 = TREE_OPERAND (arg0, 1);
8437 tree a10 = TREE_OPERAND (arg1, 0);
8438 tree a11 = TREE_OPERAND (arg1, 1);
8439 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8440 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8441 && (code == TRUTH_AND_EXPR
8442 || code == TRUTH_OR_EXPR));
8444 if (operand_equal_p (a00, a10, 0))
8445 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8446 fold_build2_loc (loc, code, type, a01, a11));
8447 else if (commutative && operand_equal_p (a00, a11, 0))
8448 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8449 fold_build2_loc (loc, code, type, a01, a10));
8450 else if (commutative && operand_equal_p (a01, a10, 0))
8451 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8452 fold_build2_loc (loc, code, type, a00, a11));
8454 /* This case is tricky because we must either have commutative
8455 operators or else A10 must not have side-effects. */
8457 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8458 && operand_equal_p (a01, a11, 0))
8459 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8460 fold_build2_loc (loc, code, type, a00, a10),
8461 a01);
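/* Caveat illustrated (a sketch, not from the original sources): in
   (b || a) && (a || c) the shared operand sits on opposite sides,
   so the merge is only done for the commutative TRUTH_AND/TRUTH_OR
   codes or when the operand that moves carries no side effects;
   otherwise reordering could change which operands are evaluated. */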
8464 /* See if we can build a range comparison. */
8465 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8466 return tem;
8468 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8469 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8471 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8472 if (tem)
8473 return fold_build2_loc (loc, code, type, tem, arg1);
8476 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8477 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8479 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8480 if (tem)
8481 return fold_build2_loc (loc, code, type, arg0, tem);
8484 /* Check for the possibility of merging component references. If our
8485 lhs is another similar operation, try to merge its rhs with our
8486 rhs. Then try to merge our lhs and rhs. */
8487 if (TREE_CODE (arg0) == code
8488 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8489 TREE_OPERAND (arg0, 1), arg1)))
8490 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8492 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8493 return tem;
8495 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8496 && (code == TRUTH_AND_EXPR
8497 || code == TRUTH_ANDIF_EXPR
8498 || code == TRUTH_OR_EXPR
8499 || code == TRUTH_ORIF_EXPR))
8501 enum tree_code ncode, icode;
8503 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8504 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8505 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8507 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8508 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8509 We don't want to pack more than two leaves into a non-IF AND/OR
8510 expression.
8511 If the tree code of the left-hand operand isn't an AND/OR-IF code
8512 and is not equal to IF-CODE, then we don't want to add the
8513 right-hand operand. If the inner right-hand side of the left-hand
8514 operand has side-effects, or isn't simple, then we can't add to it,
8515 as otherwise we might destroy the if-sequence. */
8516 if (TREE_CODE (arg0) == icode
8517 && simple_operand_p_2 (arg1)
8518 /* Needed for sequence points, to handle trapping and
8519 side-effects. */
8520 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8522 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8523 arg1);
8524 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8525 tem);
8527 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8528 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8529 else if (TREE_CODE (arg1) == icode
8530 && simple_operand_p_2 (arg0)
8531 /* Needed for sequence points, to handle trapping and
8532 side-effects. */
8533 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8535 tem = fold_build2_loc (loc, ncode, type,
8536 arg0, TREE_OPERAND (arg1, 0));
8537 return fold_build2_loc (loc, icode, type, tem,
8538 TREE_OPERAND (arg1, 1));
8540 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8541 into (A OR B).
8542 For sequence point consistency, we need to check for trapping
8543 and side-effects. */
8544 else if (code == icode && simple_operand_p_2 (arg0)
8545 && simple_operand_p_2 (arg1))
8546 return fold_build2_loc (loc, ncode, type, arg0, arg1);
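/* Example (illustrative): on targets where LOGICAL_OP_NON_SHORT_CIRCUIT
   holds, a && b with simple, non-trapping operands becomes the
   non-short-circuit TRUTH_AND_EXPR, evaluating both operands
   unconditionally; the cases above keep at most two leaves per such
   non-IF operation. */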
8549 return NULL_TREE;
8552 /* Fold a binary expression of code CODE and type TYPE with operands
8553 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8554 Return the folded expression if folding is successful. Otherwise,
8555 return NULL_TREE. */
8557 static tree
8558 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8560 enum tree_code compl_code;
8562 if (code == MIN_EXPR)
8563 compl_code = MAX_EXPR;
8564 else if (code == MAX_EXPR)
8565 compl_code = MIN_EXPR;
8566 else
8567 gcc_unreachable ();
8569 /* MIN (MAX (a, b), b) == b. */
8570 if (TREE_CODE (op0) == compl_code
8571 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8572 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8574 /* MIN (MAX (b, a), b) == b. */
8575 if (TREE_CODE (op0) == compl_code
8576 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8577 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8578 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8580 /* MIN (a, MAX (a, b)) == a. */
8581 if (TREE_CODE (op1) == compl_code
8582 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8583 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8584 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8586 /* MIN (a, MAX (b, a)) == a. */
8587 if (TREE_CODE (op1) == compl_code
8588 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8589 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8590 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8592 return NULL_TREE;
8595 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8596 by changing CODE to reduce the magnitude of constants involved in
8597 ARG0 of the comparison.
8598 Returns a canonicalized comparison tree if a simplification was
8599 possible, otherwise returns NULL_TREE.
8600 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8601 valid if signed overflow is undefined. */
8603 static tree
8604 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8605 tree arg0, tree arg1,
8606 bool *strict_overflow_p)
8608 enum tree_code code0 = TREE_CODE (arg0);
8609 tree t, cst0 = NULL_TREE;
8610 int sgn0;
8611 bool swap = false;
8613 /* Match A +- CST code arg1 and CST code arg1. We can change the
8614 first form only if overflow is undefined. */
8615 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8616 /* In principle pointers also have undefined overflow behavior,
8617 but that causes problems elsewhere. */
8618 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8619 && (code0 == MINUS_EXPR
8620 || code0 == PLUS_EXPR)
8621 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8622 || code0 == INTEGER_CST))
8623 return NULL_TREE;
8625 /* Identify the constant in arg0 and its sign. */
8626 if (code0 == INTEGER_CST)
8627 cst0 = arg0;
8628 else
8629 cst0 = TREE_OPERAND (arg0, 1);
8630 sgn0 = tree_int_cst_sgn (cst0);
8632 /* Overflowed constants and zero will cause problems. */
8633 if (integer_zerop (cst0)
8634 || TREE_OVERFLOW (cst0))
8635 return NULL_TREE;
8637 /* See if we can reduce the magnitude of the constant in
8638 arg0 by changing the comparison code. */
8639 if (code0 == INTEGER_CST)
8641 /* CST <= arg1 -> CST-1 < arg1. */
8642 if (code == LE_EXPR && sgn0 == 1)
8643 code = LT_EXPR;
8644 /* -CST < arg1 -> -CST-1 <= arg1. */
8645 else if (code == LT_EXPR && sgn0 == -1)
8646 code = LE_EXPR;
8647 /* CST > arg1 -> CST-1 >= arg1. */
8648 else if (code == GT_EXPR && sgn0 == 1)
8649 code = GE_EXPR;
8650 /* -CST >= arg1 -> -CST-1 > arg1. */
8651 else if (code == GE_EXPR && sgn0 == -1)
8652 code = GT_EXPR;
8653 else
8654 return NULL_TREE;
8655 /* arg1 code' CST' might be more canonical. */
8656 swap = true;
8658 else
8660 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8661 if (code == LT_EXPR
8662 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8663 code = LE_EXPR;
8664 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8665 else if (code == GT_EXPR
8666 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8667 code = GE_EXPR;
8668 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8669 else if (code == LE_EXPR
8670 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8671 code = LT_EXPR;
8672 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8673 else if (code == GE_EXPR
8674 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8675 code = GT_EXPR;
8676 else
8677 return NULL_TREE;
8678 *strict_overflow_p = true;
8681 /* Now build the constant reduced in magnitude. But not if that
8682 would produce one outside of its type's range. */
8683 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8684 && ((sgn0 == 1
8685 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8686 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8687 || (sgn0 == -1
8688 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8689 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8690 /* We cannot swap the comparison here as that would cause us to
8691 endlessly recurse. */
8692 return NULL_TREE;
8694 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8695 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8696 if (code0 != INTEGER_CST)
8697 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8698 t = fold_convert (TREE_TYPE (arg1), t);
8700 /* If swapping might yield a more canonical form, do so. */
8701 if (swap)
8702 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8703 else
8704 return fold_build2_loc (loc, code, type, t, arg1);
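/* Examples (illustrative): 5 <= x becomes 4 < x and is then swapped
   to x > 4; with undefined signed overflow, x + 2 > y becomes
   x + 1 >= y, shrinking the constant and setting
   *STRICT_OVERFLOW_P. */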
8707 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8708 overflow further. Try to decrease the magnitude of constants involved
8709 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8710 and put sole constants at the second argument position.
8711 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8713 static tree
8714 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8715 tree arg0, tree arg1)
8717 tree t;
8718 bool strict_overflow_p;
8719 const char * const warnmsg = G_("assuming signed overflow does not occur "
8720 "when reducing constant in comparison");
8722 /* Try canonicalization by simplifying arg0. */
8723 strict_overflow_p = false;
8724 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8725 &strict_overflow_p);
8726 if (t)
8728 if (strict_overflow_p)
8729 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8730 return t;
8733 /* Try canonicalization by simplifying arg1 using the swapped
8734 comparison. */
8735 code = swap_tree_comparison (code);
8736 strict_overflow_p = false;
8737 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8738 &strict_overflow_p);
8739 if (t && strict_overflow_p)
8740 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8741 return t;
8744 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8745 space. This is used to avoid issuing overflow warnings for
8746 expressions like &p->x, which cannot wrap. */
8748 static bool
8749 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8751 double_int di_offset, total;
8753 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8754 return true;
8756 if (bitpos < 0)
8757 return true;
8759 if (offset == NULL_TREE)
8760 di_offset = double_int_zero;
8761 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8762 return true;
8763 else
8764 di_offset = TREE_INT_CST (offset);
8766 bool overflow;
8767 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8768 total = di_offset.add_with_sign (units, true, &overflow);
8769 if (overflow)
8770 return true;
8772 if (total.high != 0)
8773 return true;
8775 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8776 if (size <= 0)
8777 return true;
8779 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8780 array. */
8781 if (TREE_CODE (base) == ADDR_EXPR)
8783 HOST_WIDE_INT base_size;
8785 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8786 if (base_size > 0 && size < base_size)
8787 size = base_size;
8790 return total.low > (unsigned HOST_WIDE_INT) size;
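/* Example (illustrative): for char a[16], a base of &a with a byte
   offset of 8 stays within the 16-byte object, so the function
   returns false and the comparison can be folded without a
   wraparound warning; an offset of 20 would make it return true. */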
8793 /* Subroutine of fold_binary. This routine performs all of the
8794 transformations that are common to the equality/inequality
8795 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8796 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8797 fold_binary should go through fold_binary instead. Fold a comparison with
8798 tree code CODE and type TYPE with operands OP0 and OP1. Return
8799 the folded comparison or NULL_TREE. */
8801 static tree
8802 fold_comparison (location_t loc, enum tree_code code, tree type,
8803 tree op0, tree op1)
8805 tree arg0, arg1, tem;
8807 arg0 = op0;
8808 arg1 = op1;
8810 STRIP_SIGN_NOPS (arg0);
8811 STRIP_SIGN_NOPS (arg1);
8813 tem = fold_relational_const (code, type, arg0, arg1);
8814 if (tem != NULL_TREE)
8815 return tem;
8817 /* If one arg is a real or integer constant, put it last. */
8818 if (tree_swap_operands_p (arg0, arg1, true))
8819 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8821 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8822 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8823 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8824 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8825 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8826 && (TREE_CODE (arg1) == INTEGER_CST
8827 && !TREE_OVERFLOW (arg1)))
8829 tree const1 = TREE_OPERAND (arg0, 1);
8830 tree const2 = arg1;
8831 tree variable = TREE_OPERAND (arg0, 0);
8832 tree lhs;
8833 int lhs_add;
8834 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8836 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8837 TREE_TYPE (arg1), const2, const1);
8839 /* If the constant operation overflowed, this can be
8840 simplified as a comparison against INT_MAX/INT_MIN. */
8841 if (TREE_CODE (lhs) == INTEGER_CST
8842 && TREE_OVERFLOW (lhs))
8844 int const1_sgn = tree_int_cst_sgn (const1);
8845 enum tree_code code2 = code;
8847 /* Get the sign of the constant on the lhs if the
8848 operation were VARIABLE + CONST1. */
8849 if (TREE_CODE (arg0) == MINUS_EXPR)
8850 const1_sgn = -const1_sgn;
8852 /* The sign of the constant determines if we overflowed
8853 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8854 Canonicalize to the INT_MIN overflow by swapping the comparison
8855 if necessary. */
8856 if (const1_sgn == -1)
8857 code2 = swap_tree_comparison (code);
8859 /* We can now look at the canonicalized case
8860 VARIABLE + 1 CODE2 INT_MIN
8861 and decide on the result. */
8862 if (code2 == LT_EXPR
8863 || code2 == LE_EXPR
8864 || code2 == EQ_EXPR)
8865 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8866 else if (code2 == NE_EXPR
8867 || code2 == GE_EXPR
8868 || code2 == GT_EXPR)
8869 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8872 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8873 && (TREE_CODE (lhs) != INTEGER_CST
8874 || !TREE_OVERFLOW (lhs)))
8876 if (code != EQ_EXPR && code != NE_EXPR)
8877 fold_overflow_warning ("assuming signed overflow does not occur "
8878 "when changing X +- C1 cmp C2 to "
8879 "X cmp C1 +- C2",
8880 WARN_STRICT_OVERFLOW_COMPARISON);
8881 return fold_build2_loc (loc, code, type, variable, lhs);
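/* Example of the transformation above (illustrative): x + 5 < 10
   becomes x < 5 when signed overflow is undefined, after the
   -Wstrict-overflow warning; if C2 -+ C1 itself overflows, the
   comparison collapses to a constant via the INT_MIN/INT_MAX
   reasoning above. */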
8885 /* For comparisons of pointers we can decompose it to a compile time
8886 comparison of the base objects and the offsets into the object.
8887 This requires at least one operand being an ADDR_EXPR or a
8888 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8889 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8890 && (TREE_CODE (arg0) == ADDR_EXPR
8891 || TREE_CODE (arg1) == ADDR_EXPR
8892 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8893 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8895 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8896 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8897 enum machine_mode mode;
8898 int volatilep, unsignedp;
8899 bool indirect_base0 = false, indirect_base1 = false;
8901 /* Get base and offset for the access. Strip ADDR_EXPR for
8902 get_inner_reference, but put it back by stripping INDIRECT_REF
8903 off the base object if possible. indirect_baseN will be true
8904 if baseN is not an address but refers to the object itself. */
8905 base0 = arg0;
8906 if (TREE_CODE (arg0) == ADDR_EXPR)
8908 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8909 &bitsize, &bitpos0, &offset0, &mode,
8910 &unsignedp, &volatilep, false);
8911 if (TREE_CODE (base0) == INDIRECT_REF)
8912 base0 = TREE_OPERAND (base0, 0);
8913 else
8914 indirect_base0 = true;
8916 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8918 base0 = TREE_OPERAND (arg0, 0);
8919 STRIP_SIGN_NOPS (base0);
8920 if (TREE_CODE (base0) == ADDR_EXPR)
8922 base0 = TREE_OPERAND (base0, 0);
8923 indirect_base0 = true;
8925 offset0 = TREE_OPERAND (arg0, 1);
8926 if (host_integerp (offset0, 0))
8928 HOST_WIDE_INT off = size_low_cst (offset0);
8929 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8930 * BITS_PER_UNIT)
8931 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8933 bitpos0 = off * BITS_PER_UNIT;
8934 offset0 = NULL_TREE;
8939 base1 = arg1;
8940 if (TREE_CODE (arg1) == ADDR_EXPR)
8942 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8943 &bitsize, &bitpos1, &offset1, &mode,
8944 &unsignedp, &volatilep, false);
8945 if (TREE_CODE (base1) == INDIRECT_REF)
8946 base1 = TREE_OPERAND (base1, 0);
8947 else
8948 indirect_base1 = true;
8950 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8952 base1 = TREE_OPERAND (arg1, 0);
8953 STRIP_SIGN_NOPS (base1);
8954 if (TREE_CODE (base1) == ADDR_EXPR)
8956 base1 = TREE_OPERAND (base1, 0);
8957 indirect_base1 = true;
8959 offset1 = TREE_OPERAND (arg1, 1);
8960 if (host_integerp (offset1, 0))
8962 HOST_WIDE_INT off = size_low_cst (offset1);
8963 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8964 * BITS_PER_UNIT)
8965 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8967 bitpos1 = off * BITS_PER_UNIT;
8968 offset1 = NULL_TREE;
8973 /* A local variable can never be pointed to by
8974 the default SSA name of an incoming parameter. */
8975 if ((TREE_CODE (arg0) == ADDR_EXPR
8976 && indirect_base0
8977 && TREE_CODE (base0) == VAR_DECL
8978 && auto_var_in_fn_p (base0, current_function_decl)
8979 && !indirect_base1
8980 && TREE_CODE (base1) == SSA_NAME
8981 && SSA_NAME_IS_DEFAULT_DEF (base1)
8982 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8983 || (TREE_CODE (arg1) == ADDR_EXPR
8984 && indirect_base1
8985 && TREE_CODE (base1) == VAR_DECL
8986 && auto_var_in_fn_p (base1, current_function_decl)
8987 && !indirect_base0
8988 && TREE_CODE (base0) == SSA_NAME
8989 && SSA_NAME_IS_DEFAULT_DEF (base0)
8990 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8992 if (code == NE_EXPR)
8993 return constant_boolean_node (1, type);
8994 else if (code == EQ_EXPR)
8995 return constant_boolean_node (0, type);
8997 /* If we have equivalent bases we might be able to simplify. */
8998 else if (indirect_base0 == indirect_base1
8999 && operand_equal_p (base0, base1, 0))
9001 /* We can fold this expression to a constant if the non-constant
9002 offset parts are equal. */
9003 if ((offset0 == offset1
9004 || (offset0 && offset1
9005 && operand_equal_p (offset0, offset1, 0)))
9006 && (code == EQ_EXPR
9007 || code == NE_EXPR
9008 || (indirect_base0 && DECL_P (base0))
9009 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9012 if (code != EQ_EXPR
9013 && code != NE_EXPR
9014 && bitpos0 != bitpos1
9015 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9016 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9017 fold_overflow_warning (("assuming pointer wraparound does not "
9018 "occur when comparing P +- C1 with "
9019 "P +- C2"),
9020 WARN_STRICT_OVERFLOW_CONDITIONAL);
9022 switch (code)
9024 case EQ_EXPR:
9025 return constant_boolean_node (bitpos0 == bitpos1, type);
9026 case NE_EXPR:
9027 return constant_boolean_node (bitpos0 != bitpos1, type);
9028 case LT_EXPR:
9029 return constant_boolean_node (bitpos0 < bitpos1, type);
9030 case LE_EXPR:
9031 return constant_boolean_node (bitpos0 <= bitpos1, type);
9032 case GE_EXPR:
9033 return constant_boolean_node (bitpos0 >= bitpos1, type);
9034 case GT_EXPR:
9035 return constant_boolean_node (bitpos0 > bitpos1, type);
9036 default:;
9039 /* We can simplify the comparison to a comparison of the variable
9040 offset parts if the constant offset parts are equal.
9041 Be careful to use signed sizetype here because otherwise we
9042 mess with array offsets in the wrong way. This is possible
9043 because pointer arithmetic is restricted to remain within an
9044 object and overflow on pointer differences is undefined as of
9045 C99 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9046 else if (bitpos0 == bitpos1
9047 && ((code == EQ_EXPR || code == NE_EXPR)
9048 || (indirect_base0 && DECL_P (base0))
9049 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9051 /* By converting to signed sizetype we cover middle-end pointer
9052 arithmetic, which operates on unsigned pointer types of sizetype's
9053 width, and ARRAY_REF offsets, which are properly sign- or
9054 zero-extended from their type in case it is narrower than
9055 sizetype. */
9056 if (offset0 == NULL_TREE)
9057 offset0 = build_int_cst (ssizetype, 0);
9058 else
9059 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9060 if (offset1 == NULL_TREE)
9061 offset1 = build_int_cst (ssizetype, 0);
9062 else
9063 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9065 if (code != EQ_EXPR
9066 && code != NE_EXPR
9067 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9068 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9069 fold_overflow_warning (("assuming pointer wraparound does not "
9070 "occur when comparing P +- C1 with "
9071 "P +- C2"),
9072 WARN_STRICT_OVERFLOW_COMPARISON);
9074 return fold_build2_loc (loc, code, type, offset0, offset1);
9077 /* For non-equal bases we can simplify if they are addresses
9078 of local binding decls or constants. */
9079 else if (indirect_base0 && indirect_base1
9080 /* We know that !operand_equal_p (base0, base1, 0)
9081 because the if condition was false. But make
9082 sure two decls are not the same. */
9083 && base0 != base1
9084 && TREE_CODE (arg0) == ADDR_EXPR
9085 && TREE_CODE (arg1) == ADDR_EXPR
9086 && (((TREE_CODE (base0) == VAR_DECL
9087 || TREE_CODE (base0) == PARM_DECL)
9088 && (targetm.binds_local_p (base0)
9089 || CONSTANT_CLASS_P (base1)))
9090 || CONSTANT_CLASS_P (base0))
9091 && (((TREE_CODE (base1) == VAR_DECL
9092 || TREE_CODE (base1) == PARM_DECL)
9093 && (targetm.binds_local_p (base1)
9094 || CONSTANT_CLASS_P (base0)))
9095 || CONSTANT_CLASS_P (base1)))
9097 if (code == EQ_EXPR)
9098 return omit_two_operands_loc (loc, type, boolean_false_node,
9099 arg0, arg1);
9100 else if (code == NE_EXPR)
9101 return omit_two_operands_loc (loc, type, boolean_true_node,
9102 arg0, arg1);
9104 /* For equal offsets we can simplify to a comparison of the
9105 base addresses. */
9106 else if (bitpos0 == bitpos1
9107 && (indirect_base0
9108 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9109 && (indirect_base1
9110 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9111 && ((offset0 == offset1)
9112 || (offset0 && offset1
9113 && operand_equal_p (offset0, offset1, 0))))
9115 if (indirect_base0)
9116 base0 = build_fold_addr_expr_loc (loc, base0);
9117 if (indirect_base1)
9118 base1 = build_fold_addr_expr_loc (loc, base1);
9119 return fold_build2_loc (loc, code, type, base0, base1);
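/* Taken together, the cases above fold e.g. (illustration only,
   with A, X and Y being local declarations):

     &A[2] <  &A[5]   -->  true    (equal bases, bitpos0 < bitpos1)
     &A[I] == &A[J]   -->  I == J  (equal bases, variable offsets)
     &X   ==  &Y      -->  false   (distinct locally bound decls)  */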
9123 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9124 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9125 the resulting offset is smaller in absolute value than the
9126 original one. */
9127 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9128 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9129 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9130 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9131 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9132 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9133 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9135 tree const1 = TREE_OPERAND (arg0, 1);
9136 tree const2 = TREE_OPERAND (arg1, 1);
9137 tree variable1 = TREE_OPERAND (arg0, 0);
9138 tree variable2 = TREE_OPERAND (arg1, 0);
9139 tree cst;
9140 const char * const warnmsg = G_("assuming signed overflow does not "
9141 "occur when combining constants around "
9142 "a comparison");
9144 /* Put the constant on the side where it doesn't overflow and is
9145 of lower absolute value than before. */
9146 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9147 ? MINUS_EXPR : PLUS_EXPR,
9148 const2, const1);
9149 if (!TREE_OVERFLOW (cst)
9150 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9152 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9153 return fold_build2_loc (loc, code, type,
9154 variable1,
9155 fold_build2_loc (loc,
9156 TREE_CODE (arg1), TREE_TYPE (arg1),
9157 variable2, cst));
9160 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9161 ? MINUS_EXPR : PLUS_EXPR,
9162 const1, const2);
9163 if (!TREE_OVERFLOW (cst)
9164 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9166 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9167 return fold_build2_loc (loc, code, type,
9168 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9169 variable1, cst),
9170 variable2);
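/* For example (a sketch; X, Y signed, overflow undefined):

     X + 2 < Y + 5   becomes   X < Y + 3

   the combined constant 3 is of smaller absolute value than 5, so
   it is kept on the side where it cannot overflow.  */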
9174 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9175 signed arithmetic case. That form is created by the compiler
9176 often enough for folding it to be of value. One example is in
9177 computing loop trip counts after Operator Strength Reduction. */
9178 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9179 && TREE_CODE (arg0) == MULT_EXPR
9180 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9181 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9182 && integer_zerop (arg1))
9184 tree const1 = TREE_OPERAND (arg0, 1);
9185 tree const2 = arg1; /* zero */
9186 tree variable1 = TREE_OPERAND (arg0, 0);
9187 enum tree_code cmp_code = code;
9189 /* Handle unfolded multiplication by zero. */
9190 if (integer_zerop (const1))
9191 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9193 fold_overflow_warning (("assuming signed overflow does not occur when "
9194 "eliminating multiplication in comparison "
9195 "with zero"),
9196 WARN_STRICT_OVERFLOW_COMPARISON);
9198 /* If const1 is negative we swap the sense of the comparison. */
9199 if (tree_int_cst_sgn (const1) < 0)
9200 cmp_code = swap_tree_comparison (cmp_code);
9202 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
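/* For example (a sketch; X signed, overflow undefined):

     X * 4  > 0   becomes   X > 0
     X * -4 > 0   becomes   X < 0   (sense of the comparison swapped)
     X * 0  > 0   becomes   0 > 0, which folds to false  */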
9205 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9206 if (tem)
9207 return tem;
9209 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9211 tree targ0 = strip_float_extensions (arg0);
9212 tree targ1 = strip_float_extensions (arg1);
9213 tree newtype = TREE_TYPE (targ0);
9215 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9216 newtype = TREE_TYPE (targ1);
9218 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9219 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9220 return fold_build2_loc (loc, code, type,
9221 fold_convert_loc (loc, newtype, targ0),
9222 fold_convert_loc (loc, newtype, targ1));
9224 /* (-a) CMP (-b) -> b CMP a */
9225 if (TREE_CODE (arg0) == NEGATE_EXPR
9226 && TREE_CODE (arg1) == NEGATE_EXPR)
9227 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9228 TREE_OPERAND (arg0, 0));
9230 if (TREE_CODE (arg1) == REAL_CST)
9232 REAL_VALUE_TYPE cst;
9233 cst = TREE_REAL_CST (arg1);
9235 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9236 if (TREE_CODE (arg0) == NEGATE_EXPR)
9237 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9238 TREE_OPERAND (arg0, 0),
9239 build_real (TREE_TYPE (arg1),
9240 real_value_negate (&cst)));
9242 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9243 /* a CMP (-0) -> a CMP 0 */
9244 if (REAL_VALUE_MINUS_ZERO (cst))
9245 return fold_build2_loc (loc, code, type, arg0,
9246 build_real (TREE_TYPE (arg1), dconst0));
9248 /* x != NaN is always true, other ops are always false. */
9249 if (REAL_VALUE_ISNAN (cst)
9250 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9252 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9253 return omit_one_operand_loc (loc, type, tem, arg0);
9256 /* Fold comparisons against infinity. */
9257 if (REAL_VALUE_ISINF (cst)
9258 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9260 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9261 if (tem != NULL_TREE)
9262 return tem;
9266 /* If this is a comparison of a real constant with a PLUS_EXPR
9267 or a MINUS_EXPR of a real constant, we can convert it into a
9268 comparison with a revised real constant, as long as
9269 unsafe_math_optimizations are enabled and no overflow occurs.
9270 if (flag_unsafe_math_optimizations
9271 && TREE_CODE (arg1) == REAL_CST
9272 && (TREE_CODE (arg0) == PLUS_EXPR
9273 || TREE_CODE (arg0) == MINUS_EXPR)
9274 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9275 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9276 ? MINUS_EXPR : PLUS_EXPR,
9277 arg1, TREE_OPERAND (arg0, 1)))
9278 && !TREE_OVERFLOW (tem))
9279 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9281 /* Likewise, we can simplify a comparison of a real constant with
9282 a MINUS_EXPR whose first operand is also a real constant, i.e.
9283 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9284 floating-point types only if -fassociative-math is set. */
9285 if (flag_associative_math
9286 && TREE_CODE (arg1) == REAL_CST
9287 && TREE_CODE (arg0) == MINUS_EXPR
9288 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9289 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9290 arg1))
9291 && !TREE_OVERFLOW (tem))
9292 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9293 TREE_OPERAND (arg0, 1), tem);
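/* For example (a sketch, requiring -fassociative-math):

     (10.0 - X) < 4.0   becomes   X > 6.0

   TEM is 10.0 - 4.0 and the comparison is swapped because X
   appears negated on the left-hand side.  */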
9295 /* Fold comparisons against built-in math functions. */
9296 if (TREE_CODE (arg1) == REAL_CST
9297 && flag_unsafe_math_optimizations
9298 && ! flag_errno_math)
9300 enum built_in_function fcode = builtin_mathfn_code (arg0);
9302 if (fcode != END_BUILTINS)
9304 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9305 if (tem != NULL_TREE)
9306 return tem;
9311 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9312 && CONVERT_EXPR_P (arg0))
9314 /* If we are widening one operand of an integer comparison,
9315 see if the other operand is similarly being widened. Perhaps we
9316 can do the comparison in the narrower type. */
9317 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9318 if (tem)
9319 return tem;
9321 /* Or if we are changing signedness. */
9322 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9323 if (tem)
9324 return tem;
9327 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9328 constant, we can simplify it. */
9329 if (TREE_CODE (arg1) == INTEGER_CST
9330 && (TREE_CODE (arg0) == MIN_EXPR
9331 || TREE_CODE (arg0) == MAX_EXPR)
9332 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9334 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9335 if (tem)
9336 return tem;
9339 /* Simplify comparison of something with itself. (For IEEE
9340 floating-point, we can only do some of these simplifications.) */
9341 if (operand_equal_p (arg0, arg1, 0))
9343 switch (code)
9345 case EQ_EXPR:
9346 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9347 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9348 return constant_boolean_node (1, type);
9349 break;
9351 case GE_EXPR:
9352 case LE_EXPR:
9353 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9354 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9355 return constant_boolean_node (1, type);
9356 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9358 case NE_EXPR:
9359 /* For NE, we can only do this simplification if the operands are
9360 integral or we don't honor IEEE floating-point NaNs. */
9361 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9362 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9363 break;
9364 /* ... fall through ... */
9365 case GT_EXPR:
9366 case LT_EXPR:
9367 return constant_boolean_node (0, type);
9368 default:
9369 gcc_unreachable ();
9373 /* If we are comparing an expression that just has comparisons
9374 of two integer values, arithmetic expressions of those comparisons,
9375 and constants, we can simplify it. There are only three cases
9376 to check: the two values can either be equal, the first can be
9377 greater, or the second can be greater. Fold the expression for
9378 those three values. Since each value must be 0 or 1, we have
9379 eight possibilities, each of which corresponds to the constant 0
9380 or 1 or one of the six possible comparisons.
9382 This handles common cases like (a > b) == 0 but also handles
9383 expressions like ((x > y) - (y > x)) > 0, which supposedly
9384 occur in macroized code. */
9386 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9388 tree cval1 = 0, cval2 = 0;
9389 int save_p = 0;
9391 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9392 /* Don't handle degenerate cases here; they should already
9393 have been handled anyway. */
9394 && cval1 != 0 && cval2 != 0
9395 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9396 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9397 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9398 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9399 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9400 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9401 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9403 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9404 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9406 /* We can't just pass T to eval_subst in case cval1 or cval2
9407 was the same as ARG1. */
9409 tree high_result
9410 = fold_build2_loc (loc, code, type,
9411 eval_subst (loc, arg0, cval1, maxval,
9412 cval2, minval),
9413 arg1);
9414 tree equal_result
9415 = fold_build2_loc (loc, code, type,
9416 eval_subst (loc, arg0, cval1, maxval,
9417 cval2, maxval),
9418 arg1);
9419 tree low_result
9420 = fold_build2_loc (loc, code, type,
9421 eval_subst (loc, arg0, cval1, minval,
9422 cval2, maxval),
9423 arg1);
9425 /* All three of these results should be 0 or 1. Confirm they are.
9426 Then use those values to select the proper code to use. */
9428 if (TREE_CODE (high_result) == INTEGER_CST
9429 && TREE_CODE (equal_result) == INTEGER_CST
9430 && TREE_CODE (low_result) == INTEGER_CST)
9432 /* Make a 3-bit mask with the high-order bit being the
9433 value for `>', the next for `=', and the low for `<'. */
9434 switch ((integer_onep (high_result) * 4)
9435 + (integer_onep (equal_result) * 2)
9436 + integer_onep (low_result))
9438 case 0:
9439 /* Always false. */
9440 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9441 case 1:
9442 code = LT_EXPR;
9443 break;
9444 case 2:
9445 code = EQ_EXPR;
9446 break;
9447 case 3:
9448 code = LE_EXPR;
9449 break;
9450 case 4:
9451 code = GT_EXPR;
9452 break;
9453 case 5:
9454 code = NE_EXPR;
9455 break;
9456 case 6:
9457 code = GE_EXPR;
9458 break;
9459 case 7:
9460 /* Always true. */
9461 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9464 if (save_p)
9466 tem = save_expr (build2 (code, type, cval1, cval2));
9467 SET_EXPR_LOCATION (tem, loc);
9468 return tem;
9470 return fold_build2_loc (loc, code, type, cval1, cval2);
9475 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9476 into a single range test. */
9477 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9478 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9479 && TREE_CODE (arg1) == INTEGER_CST
9480 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9481 && !integer_zerop (TREE_OPERAND (arg0, 1))
9482 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9483 && !TREE_OVERFLOW (arg1))
9485 tem = fold_div_compare (loc, code, type, arg0, arg1);
9486 if (tem != NULL_TREE)
9487 return tem;
9490 /* Fold ~X op ~Y as Y op X. */
9491 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9492 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9494 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9495 return fold_build2_loc (loc, code, type,
9496 fold_convert_loc (loc, cmp_type,
9497 TREE_OPERAND (arg1, 0)),
9498 TREE_OPERAND (arg0, 0));
9501 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9502 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9503 && TREE_CODE (arg1) == INTEGER_CST)
9505 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9506 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9507 TREE_OPERAND (arg0, 0),
9508 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9509 fold_convert_loc (loc, cmp_type, arg1)));
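/* For example (illustration, unsigned 32-bit X):

     ~X < 5   becomes   X > ~5   (i.e. X > 0xfffffffa)

   which is valid because ~ reverses the unsigned order.  */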
9512 return NULL_TREE;
9516 /* Subroutine of fold_binary. Optimize complex multiplications of the
9517 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9518 argument EXPR represents the expression "z" of type TYPE. */
9520 static tree
9521 fold_mult_zconjz (location_t loc, tree type, tree expr)
9523 tree itype = TREE_TYPE (type);
9524 tree rpart, ipart, tem;
9526 if (TREE_CODE (expr) == COMPLEX_EXPR)
9528 rpart = TREE_OPERAND (expr, 0);
9529 ipart = TREE_OPERAND (expr, 1);
9531 else if (TREE_CODE (expr) == COMPLEX_CST)
9533 rpart = TREE_REALPART (expr);
9534 ipart = TREE_IMAGPART (expr);
9536 else
9538 expr = save_expr (expr);
9539 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9540 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9543 rpart = save_expr (rpart);
9544 ipart = save_expr (ipart);
9545 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9546 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9547 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9548 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9549 build_zero_cst (itype));
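/* As an illustration (assumed user code, not from this file):

     _Complex double z = ...;
     _Complex double w = z * ~z;    (~z is GNU C conjugation)

   the fold builds COMPLEX_EXPR <r*r + i*i, 0> with r and i the
   saved real and imaginary parts of z, avoiding the full complex
   multiplication.  */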
9553 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9554 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9555 guarantees that P and N have the same least significant log2(M) bits.
9556 N is not otherwise constrained. In particular, N is not normalized to
9557 0 <= N < M as is common. In general, the precise value of P is unknown.
9558 M is chosen as large as possible such that constant N can be determined.
9560 Returns M and sets *RESIDUE to N.
9562 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9563 account. This is not always possible due to PR 35705.
9566 static unsigned HOST_WIDE_INT
9567 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9568 bool allow_func_align)
9570 enum tree_code code;
9572 *residue = 0;
9574 code = TREE_CODE (expr);
9575 if (code == ADDR_EXPR)
9577 unsigned int bitalign;
9578 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9579 *residue /= BITS_PER_UNIT;
9580 return bitalign / BITS_PER_UNIT;
9582 else if (code == POINTER_PLUS_EXPR)
9584 tree op0, op1;
9585 unsigned HOST_WIDE_INT modulus;
9586 enum tree_code inner_code;
9588 op0 = TREE_OPERAND (expr, 0);
9589 STRIP_NOPS (op0);
9590 modulus = get_pointer_modulus_and_residue (op0, residue,
9591 allow_func_align);
9593 op1 = TREE_OPERAND (expr, 1);
9594 STRIP_NOPS (op1);
9595 inner_code = TREE_CODE (op1);
9596 if (inner_code == INTEGER_CST)
9598 *residue += TREE_INT_CST_LOW (op1);
9599 return modulus;
9601 else if (inner_code == MULT_EXPR)
9603 op1 = TREE_OPERAND (op1, 1);
9604 if (TREE_CODE (op1) == INTEGER_CST)
9606 unsigned HOST_WIDE_INT align;
9608 /* Compute the greatest power-of-2 divisor of op1. */
9609 align = TREE_INT_CST_LOW (op1);
9610 align &= -align;
9612 /* If align is non-zero and less than modulus, replace
9613 modulus with align. If align is 0, then either op1 is 0
9614 or the greatest power-of-2 divisor of op1 doesn't fit in an
9615 unsigned HOST_WIDE_INT. In either case, no additional
9616 constraint is imposed. */
9617 if (align)
9618 modulus = MIN (modulus, align);
9620 return modulus;
9625 /* If we get here, we were unable to determine anything useful about the
9626 expression. */
9627 return 1;
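/* Worked example (a sketch): for EXPR == &buf p+ N * 4, where buf
   is known to be 16-byte aligned, the ADDR_EXPR case yields
   modulus 16 and residue 0 for &buf; the MULT_EXPR case then
   intersects that with the largest power-of-2 divisor of 4, so the
   result is modulus 4, *RESIDUE 0: the pointer value is known to
   be a multiple of 4.  */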
9630 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9631 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9633 static bool
9634 vec_cst_ctor_to_array (tree arg, tree *elts)
9636 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9638 if (TREE_CODE (arg) == VECTOR_CST)
9640 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9641 elts[i] = VECTOR_CST_ELT (arg, i);
9643 else if (TREE_CODE (arg) == CONSTRUCTOR)
9645 constructor_elt *elt;
9647 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9648 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9649 return false;
9650 else
9651 elts[i] = elt->value;
9653 else
9654 return false;
9655 for (; i < nelts; i++)
9656 elts[i]
9657 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9658 return true;
9661 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9662 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9663 NULL_TREE otherwise. */
9665 static tree
9666 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9668 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9669 tree *elts;
9670 bool need_ctor = false;
9672 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9673 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9674 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9675 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9676 return NULL_TREE;
9678 elts = XALLOCAVEC (tree, nelts * 3);
9679 if (!vec_cst_ctor_to_array (arg0, elts)
9680 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9681 return NULL_TREE;
9683 for (i = 0; i < nelts; i++)
9685 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9686 need_ctor = true;
9687 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9690 if (need_ctor)
9692 vec<constructor_elt, va_gc> *v;
9693 vec_alloc (v, nelts);
9694 for (i = 0; i < nelts; i++)
9695 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9696 return build_constructor (type, v);
9698 else
9699 return build_vector (type, &elts[2 * nelts]);
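/* For example (illustration): with nelts == 4,

     arg0 = { 0, 1, 2, 3 }, arg1 = { 4, 5, 6, 7 },
     sel  = { 0, 4, 1, 5 }

   indexes the 8-element concatenation of arg0 and arg1 and yields
   the VECTOR_CST { 0, 4, 1, 5 }.  */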
9702 /* Try to fold a pointer difference of type TYPE between two address
9703 expressions of array references AREF0 and AREF1 using location LOC.
9704 Return a simplified expression for the difference or NULL_TREE. */
9706 static tree
9707 fold_addr_of_array_ref_difference (location_t loc, tree type,
9708 tree aref0, tree aref1)
9710 tree base0 = TREE_OPERAND (aref0, 0);
9711 tree base1 = TREE_OPERAND (aref1, 0);
9712 tree base_offset = build_int_cst (type, 0);
9714 /* If the bases are array references as well, recurse. If the bases
9715 are pointer indirections compute the difference of the pointers.
9716 If the bases are equal, we are set. */
9717 if ((TREE_CODE (base0) == ARRAY_REF
9718 && TREE_CODE (base1) == ARRAY_REF
9719 && (base_offset
9720 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9721 || (INDIRECT_REF_P (base0)
9722 && INDIRECT_REF_P (base1)
9723 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9724 TREE_OPERAND (base0, 0),
9725 TREE_OPERAND (base1, 0))))
9726 || operand_equal_p (base0, base1, 0))
9728 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9729 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9730 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9731 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9732 return fold_build2_loc (loc, PLUS_EXPR, type,
9733 base_offset,
9734 fold_build2_loc (loc, MULT_EXPR, type,
9735 diff, esz));
9737 return NULL_TREE;
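/* For example (a sketch, assuming 4-byte int): for int A[10], the
   address difference

     &A[I] - &A[J]   folds to   0 + (I - J) * 4

   with 4 being the element size; for nested ARRAY_REFs the
   recursion accumulates the difference of the bases in BASE_OFFSET
   first.  */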
9740 /* If the real or vector real constant CST of type TYPE has an exact
9741 inverse, return it, else return NULL_TREE. */
9743 static tree
9744 exact_inverse (tree type, tree cst)
9746 REAL_VALUE_TYPE r;
9747 tree unit_type, *elts;
9748 enum machine_mode mode;
9749 unsigned vec_nelts, i;
9751 switch (TREE_CODE (cst))
9753 case REAL_CST:
9754 r = TREE_REAL_CST (cst);
9756 if (exact_real_inverse (TYPE_MODE (type), &r))
9757 return build_real (type, r);
9759 return NULL_TREE;
9761 case VECTOR_CST:
9762 vec_nelts = VECTOR_CST_NELTS (cst);
9763 elts = XALLOCAVEC (tree, vec_nelts);
9764 unit_type = TREE_TYPE (type);
9765 mode = TYPE_MODE (unit_type);
9767 for (i = 0; i < vec_nelts; i++)
9769 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9770 if (!exact_real_inverse (mode, &r))
9771 return NULL_TREE;
9772 elts[i] = build_real (unit_type, r);
9775 return build_vector (type, elts);
9777 default:
9778 return NULL_TREE;
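/* For example (illustration): 4.0 has the exact inverse 0.25, so a
   caller can rewrite X / 4.0 as X * 0.25; 3.0 has none (1.0/3.0 is
   not exactly representable), so NULL_TREE is returned and the
   division must be kept.  */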
9782 /* Fold a binary expression of code CODE and type TYPE with operands
9783 OP0 and OP1. LOC is the location of the resulting expression.
9784 Return the folded expression if folding is successful. Otherwise,
9785 return NULL_TREE. */
9787 tree
9788 fold_binary_loc (location_t loc,
9789 enum tree_code code, tree type, tree op0, tree op1)
9791 enum tree_code_class kind = TREE_CODE_CLASS (code);
9792 tree arg0, arg1, tem;
9793 tree t1 = NULL_TREE;
9794 bool strict_overflow_p;
9796 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9797 && TREE_CODE_LENGTH (code) == 2
9798 && op0 != NULL_TREE
9799 && op1 != NULL_TREE);
9801 arg0 = op0;
9802 arg1 = op1;
9804 /* Strip any conversions that don't change the mode. This is
9805 safe for every expression, except for a comparison expression
9806 because its signedness is derived from its operands. So, in
9807 the latter case, only strip conversions that don't change the
9808 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9809 preserved.
9811 Note that this is done as an internal manipulation within the
9812 constant folder, in order to find the simplest representation
9813 of the arguments so that their form can be studied. In any
9814 case, the appropriate type conversions should be put back in
9815 the tree that will get out of the constant folder. */
9817 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9819 STRIP_SIGN_NOPS (arg0);
9820 STRIP_SIGN_NOPS (arg1);
9822 else
9824 STRIP_NOPS (arg0);
9825 STRIP_NOPS (arg1);
9828 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9829 constant but we can't do arithmetic on them. */
9830 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9831 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9832 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9833 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9834 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9835 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9837 if (kind == tcc_binary)
9839 /* Make sure type and arg0 have the same saturating flag. */
9840 gcc_assert (TYPE_SATURATING (type)
9841 == TYPE_SATURATING (TREE_TYPE (arg0)));
9842 tem = const_binop (code, arg0, arg1);
9844 else if (kind == tcc_comparison)
9845 tem = fold_relational_const (code, type, arg0, arg1);
9846 else
9847 tem = NULL_TREE;
9849 if (tem != NULL_TREE)
9851 if (TREE_TYPE (tem) != type)
9852 tem = fold_convert_loc (loc, type, tem);
9853 return tem;
9857 /* If this is a commutative operation, and ARG0 is a constant, move it
9858 to ARG1 to reduce the number of tests below. */
9859 if (commutative_tree_code (code)
9860 && tree_swap_operands_p (arg0, arg1, true))
9861 return fold_build2_loc (loc, code, type, op1, op0);
9863 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9865 First check for cases where an arithmetic operation is applied to a
9866 compound, conditional, or comparison operation. Push the arithmetic
9867 operation inside the compound or conditional to see if any folding
9868 can then be done. Convert comparison to conditional for this purpose.
9869 This also optimizes non-constant cases that used to be done in
9870 expand_expr.
9872 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9873 one of the operands is a comparison and the other is a comparison, a
9874 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9875 code below would make the expression more complex. Change it to a
9876 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9877 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9879 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9880 || code == EQ_EXPR || code == NE_EXPR)
9881 && TREE_CODE (type) != VECTOR_TYPE
9882 && ((truth_value_p (TREE_CODE (arg0))
9883 && (truth_value_p (TREE_CODE (arg1))
9884 || (TREE_CODE (arg1) == BIT_AND_EXPR
9885 && integer_onep (TREE_OPERAND (arg1, 1)))))
9886 || (truth_value_p (TREE_CODE (arg1))
9887 && (truth_value_p (TREE_CODE (arg0))
9888 || (TREE_CODE (arg0) == BIT_AND_EXPR
9889 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9891 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9892 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9893 : TRUTH_XOR_EXPR,
9894 boolean_type_node,
9895 fold_convert_loc (loc, boolean_type_node, arg0),
9896 fold_convert_loc (loc, boolean_type_node, arg1));
9898 if (code == EQ_EXPR)
9899 tem = invert_truthvalue_loc (loc, tem);
9901 return fold_convert_loc (loc, type, tem);
9904 if (TREE_CODE_CLASS (code) == tcc_binary
9905 || TREE_CODE_CLASS (code) == tcc_comparison)
9907 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9909 tem = fold_build2_loc (loc, code, type,
9910 fold_convert_loc (loc, TREE_TYPE (op0),
9911 TREE_OPERAND (arg0, 1)), op1);
9912 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9913 tem);
9915 if (TREE_CODE (arg1) == COMPOUND_EXPR
9916 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9918 tem = fold_build2_loc (loc, code, type, op0,
9919 fold_convert_loc (loc, TREE_TYPE (op1),
9920 TREE_OPERAND (arg1, 1)));
9921 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9922 tem);
9925 if (TREE_CODE (arg0) == COND_EXPR
9926 || TREE_CODE (arg0) == VEC_COND_EXPR
9927 || COMPARISON_CLASS_P (arg0))
9929 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9930 arg0, arg1,
9931 /*cond_first_p=*/1);
9932 if (tem != NULL_TREE)
9933 return tem;
9936 if (TREE_CODE (arg1) == COND_EXPR
9937 || TREE_CODE (arg1) == VEC_COND_EXPR
9938 || COMPARISON_CLASS_P (arg1))
9940 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9941 arg1, arg0,
9942 /*cond_first_p=*/0);
9943 if (tem != NULL_TREE)
9944 return tem;
9948 switch (code)
9950 case MEM_REF:
9951 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9952 if (TREE_CODE (arg0) == ADDR_EXPR
9953 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9955 tree iref = TREE_OPERAND (arg0, 0);
9956 return fold_build2 (MEM_REF, type,
9957 TREE_OPERAND (iref, 0),
9958 int_const_binop (PLUS_EXPR, arg1,
9959 TREE_OPERAND (iref, 1)));
9962 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9963 if (TREE_CODE (arg0) == ADDR_EXPR
9964 && handled_component_p (TREE_OPERAND (arg0, 0)))
9966 tree base;
9967 HOST_WIDE_INT coffset;
9968 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9969 &coffset);
9970 if (!base)
9971 return NULL_TREE;
9972 return fold_build2 (MEM_REF, type,
9973 build_fold_addr_expr (base),
9974 int_const_binop (PLUS_EXPR, arg1,
9975 size_int (coffset)));
9978 return NULL_TREE;
9980 case POINTER_PLUS_EXPR:
9981 /* 0 +p index -> (type)index */
9982 if (integer_zerop (arg0))
9983 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9985 /* PTR +p 0 -> PTR */
9986 if (integer_zerop (arg1))
9987 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9989 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9990 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9991 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9992 return fold_convert_loc (loc, type,
9993 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9994 fold_convert_loc (loc, sizetype,
9995 arg1),
9996 fold_convert_loc (loc, sizetype,
9997 arg0)));
9999 /* (PTR +p B) +p A -> PTR +p (B + A) */
10000 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10001 && !upc_shared_type_p (TREE_TYPE (type)))
10003 tree inner;
10004 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10005 tree arg00 = TREE_OPERAND (arg0, 0);
10006 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10007 arg01, fold_convert_loc (loc, sizetype, arg1));
10008 return fold_convert_loc (loc, type,
10009 fold_build_pointer_plus_loc (loc,
10010 arg00, inner));
10013 /* PTR_CST +p CST -> CST1 */
10014 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10015 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10016 fold_convert_loc (loc, type, arg1));
10018 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10019 of the array. The loop optimizer sometimes produces this type of
10020 expression. */
10021 if (TREE_CODE (arg0) == ADDR_EXPR)
10023 tem = try_move_mult_to_index (loc, arg0,
10024 fold_convert_loc (loc,
10025 ssizetype, arg1));
10026 if (tem)
10027 return fold_convert_loc (loc, type, tem);
10030 return NULL_TREE;
10032 case PLUS_EXPR:
10033 /* A + (-B) -> A - B */
10034 if (TREE_CODE (arg1) == NEGATE_EXPR)
10035 return fold_build2_loc (loc, MINUS_EXPR, type,
10036 fold_convert_loc (loc, type, arg0),
10037 fold_convert_loc (loc, type,
10038 TREE_OPERAND (arg1, 0)));
10039 /* (-A) + B -> B - A */
10040 if (TREE_CODE (arg0) == NEGATE_EXPR
10041 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10042 return fold_build2_loc (loc, MINUS_EXPR, type,
10043 fold_convert_loc (loc, type, arg1),
10044 fold_convert_loc (loc, type,
10045 TREE_OPERAND (arg0, 0)));
10047 /* Disable further optimizations involving UPC shared pointers,
10048 because integers are not interoperable with shared pointers. */
10049 if ((TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
10050 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
10051 || (TREE_TYPE (arg1) && POINTER_TYPE_P (TREE_TYPE (arg1))
10052 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg1)))))
10053 return NULL_TREE;
10055 if (INTEGRAL_TYPE_P (type))
10057 /* Convert ~A + 1 to -A. */
10058 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10059 && integer_onep (arg1))
10060 return fold_build1_loc (loc, NEGATE_EXPR, type,
10061 fold_convert_loc (loc, type,
10062 TREE_OPERAND (arg0, 0)));
10064 /* ~X + X is -1. */
10065 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10066 && !TYPE_OVERFLOW_TRAPS (type))
10068 tree tem = TREE_OPERAND (arg0, 0);
10070 STRIP_NOPS (tem);
10071 if (operand_equal_p (tem, arg1, 0))
10073 t1 = build_int_cst_type (type, -1);
10074 return omit_one_operand_loc (loc, type, t1, arg1);
10078 /* X + ~X is -1. */
10079 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10080 && !TYPE_OVERFLOW_TRAPS (type))
10082 tree tem = TREE_OPERAND (arg1, 0);
10084 STRIP_NOPS (tem);
10085 if (operand_equal_p (arg0, tem, 0))
10087 t1 = build_int_cst_type (type, -1);
10088 return omit_one_operand_loc (loc, type, t1, arg0);
10092 /* X + (X / CST) * -CST is X % CST. */
10093 if (TREE_CODE (arg1) == MULT_EXPR
10094 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10095 && operand_equal_p (arg0,
10096 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10098 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10099 tree cst1 = TREE_OPERAND (arg1, 1);
10100 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10101 cst1, cst0);
10102 if (sum && integer_zerop (sum))
10103 return fold_convert_loc (loc, type,
10104 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10105 TREE_TYPE (arg0), arg0,
10106 cst0));
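/* For example (a sketch; X signed, overflow undefined):

     X + (X / 16) * -16   becomes   X % 16

   since cst1 + cst0 == -16 + 16 folds to zero.  */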
10110 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10111 one. Make sure the type is not saturating and has the signedness of
10112 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10113 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10114 if ((TREE_CODE (arg0) == MULT_EXPR
10115 || TREE_CODE (arg1) == MULT_EXPR)
10116 && !TYPE_SATURATING (type)
10117 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10118 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10119 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10121 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10122 if (tem)
10123 return tem;
10126 if (! FLOAT_TYPE_P (type))
10128 if (integer_zerop (arg1))
10129 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10131 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10132 with a constant, and the two constants have no bits in common,
10133 we should treat this as a BIT_IOR_EXPR since this may produce more
10134 simplifications. */
10135 if (TREE_CODE (arg0) == BIT_AND_EXPR
10136 && TREE_CODE (arg1) == BIT_AND_EXPR
10137 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10138 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10139 && integer_zerop (const_binop (BIT_AND_EXPR,
10140 TREE_OPERAND (arg0, 1),
10141 TREE_OPERAND (arg1, 1))))
10143 code = BIT_IOR_EXPR;
10144 goto bit_ior;
10147 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10148 (plus (plus (mult) (mult)) (foo)) so that we can
10149 take advantage of the factoring cases below. */
10150 if (TYPE_OVERFLOW_WRAPS (type)
10151 && (((TREE_CODE (arg0) == PLUS_EXPR
10152 || TREE_CODE (arg0) == MINUS_EXPR)
10153 && TREE_CODE (arg1) == MULT_EXPR)
10154 || ((TREE_CODE (arg1) == PLUS_EXPR
10155 || TREE_CODE (arg1) == MINUS_EXPR)
10156 && TREE_CODE (arg0) == MULT_EXPR)))
10158 tree parg0, parg1, parg, marg;
10159 enum tree_code pcode;
10161 if (TREE_CODE (arg1) == MULT_EXPR)
10162 parg = arg0, marg = arg1;
10163 else
10164 parg = arg1, marg = arg0;
10165 pcode = TREE_CODE (parg);
10166 parg0 = TREE_OPERAND (parg, 0);
10167 parg1 = TREE_OPERAND (parg, 1);
10168 STRIP_NOPS (parg0);
10169 STRIP_NOPS (parg1);
10171 if (TREE_CODE (parg0) == MULT_EXPR
10172 && TREE_CODE (parg1) != MULT_EXPR)
10173 return fold_build2_loc (loc, pcode, type,
10174 fold_build2_loc (loc, PLUS_EXPR, type,
10175 fold_convert_loc (loc, type,
10176 parg0),
10177 fold_convert_loc (loc, type,
10178 marg)),
10179 fold_convert_loc (loc, type, parg1));
10180 if (TREE_CODE (parg0) != MULT_EXPR
10181 && TREE_CODE (parg1) == MULT_EXPR)
10182 return
10183 fold_build2_loc (loc, PLUS_EXPR, type,
10184 fold_convert_loc (loc, type, parg0),
10185 fold_build2_loc (loc, pcode, type,
10186 fold_convert_loc (loc, type, marg),
10187 fold_convert_loc (loc, type,
10188 parg1)));
10191 else
10193 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10194 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10195 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10197 /* Likewise if the operands are reversed. */
10198 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10199 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10201 /* Convert X + -C into X - C. */
10202 if (TREE_CODE (arg1) == REAL_CST
10203 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10205 tem = fold_negate_const (arg1, type);
10206 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10207 return fold_build2_loc (loc, MINUS_EXPR, type,
10208 fold_convert_loc (loc, type, arg0),
10209 fold_convert_loc (loc, type, tem));
10212 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10213 to __complex__ ( x, y ). This is not the same for SNaNs or
10214 if signed zeros are involved. */
10215 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10216 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10217 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10219 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10220 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10221 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10222 bool arg0rz = false, arg0iz = false;
10223 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10224 || (arg0i && (arg0iz = real_zerop (arg0i))))
10226 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10227 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10228 if (arg0rz && arg1i && real_zerop (arg1i))
10230 tree rp = arg1r ? arg1r
10231 : build1 (REALPART_EXPR, rtype, arg1);
10232 tree ip = arg0i ? arg0i
10233 : build1 (IMAGPART_EXPR, rtype, arg0);
10234 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10236 else if (arg0iz && arg1r && real_zerop (arg1r))
10238 tree rp = arg0r ? arg0r
10239 : build1 (REALPART_EXPR, rtype, arg0);
10240 tree ip = arg1i ? arg1i
10241 : build1 (IMAGPART_EXPR, rtype, arg1);
10242 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10247 if (flag_unsafe_math_optimizations
10248 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10249 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10250 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10251 return tem;
10253 /* Convert x+x into x*2.0. */
10254 if (operand_equal_p (arg0, arg1, 0)
10255 && SCALAR_FLOAT_TYPE_P (type))
10256 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10257 build_real (type, dconst2));
10259 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10260 We associate floats only if the user has specified
10261 -fassociative-math. */
10262 if (flag_associative_math
10263 && TREE_CODE (arg1) == PLUS_EXPR
10264 && TREE_CODE (arg0) != MULT_EXPR)
10266 tree tree10 = TREE_OPERAND (arg1, 0);
10267 tree tree11 = TREE_OPERAND (arg1, 1);
10268 if (TREE_CODE (tree11) == MULT_EXPR
10269 && TREE_CODE (tree10) == MULT_EXPR)
10271 tree tree0;
10272 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10273 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10276 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10277 We associate floats only if the user has specified
10278 -fassociative-math. */
10279 if (flag_associative_math
10280 && TREE_CODE (arg0) == PLUS_EXPR
10281 && TREE_CODE (arg1) != MULT_EXPR)
10283 tree tree00 = TREE_OPERAND (arg0, 0);
10284 tree tree01 = TREE_OPERAND (arg0, 1);
10285 if (TREE_CODE (tree01) == MULT_EXPR
10286 && TREE_CODE (tree00) == MULT_EXPR)
10288 tree tree0;
10289 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10290 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10295 bit_rotate:
10296 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10297 is a rotate of A by C1 bits. */
10298 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10299 is a rotate of A by B bits. */
10301 enum tree_code code0, code1;
10302 tree rtype;
10303 code0 = TREE_CODE (arg0);
10304 code1 = TREE_CODE (arg1);
10305 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10306 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10307 && operand_equal_p (TREE_OPERAND (arg0, 0),
10308 TREE_OPERAND (arg1, 0), 0)
10309 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10310 TYPE_UNSIGNED (rtype))
10311 /* Only create rotates in complete modes. Other cases are not
10312 expanded properly. */
10313 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10315 tree tree01, tree11;
10316 enum tree_code code01, code11;
10318 tree01 = TREE_OPERAND (arg0, 1);
10319 tree11 = TREE_OPERAND (arg1, 1);
10320 STRIP_NOPS (tree01);
10321 STRIP_NOPS (tree11);
10322 code01 = TREE_CODE (tree01);
10323 code11 = TREE_CODE (tree11);
10324 if (code01 == INTEGER_CST
10325 && code11 == INTEGER_CST
10326 && TREE_INT_CST_HIGH (tree01) == 0
10327 && TREE_INT_CST_HIGH (tree11) == 0
10328 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10329 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10331 tem = build2_loc (loc, LROTATE_EXPR,
10332 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10333 TREE_OPERAND (arg0, 0),
10334 code0 == LSHIFT_EXPR ? tree01 : tree11);
10335 return fold_convert_loc (loc, type, tem);
10337 else if (code11 == MINUS_EXPR)
10339 tree tree110, tree111;
10340 tree110 = TREE_OPERAND (tree11, 0);
10341 tree111 = TREE_OPERAND (tree11, 1);
10342 STRIP_NOPS (tree110);
10343 STRIP_NOPS (tree111);
10344 if (TREE_CODE (tree110) == INTEGER_CST
10345 && 0 == compare_tree_int (tree110,
10346 TYPE_PRECISION
10347 (TREE_TYPE (TREE_OPERAND
10348 (arg0, 0))))
10349 && operand_equal_p (tree01, tree111, 0))
10350 return
10351 fold_convert_loc (loc, type,
10352 build2 ((code0 == LSHIFT_EXPR
10353 ? LROTATE_EXPR
10354 : RROTATE_EXPR),
10355 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10356 TREE_OPERAND (arg0, 0), tree01));
10358 else if (code01 == MINUS_EXPR)
10360 tree tree010, tree011;
10361 tree010 = TREE_OPERAND (tree01, 0);
10362 tree011 = TREE_OPERAND (tree01, 1);
10363 STRIP_NOPS (tree010);
10364 STRIP_NOPS (tree011);
10365 if (TREE_CODE (tree010) == INTEGER_CST
10366 && 0 == compare_tree_int (tree010,
10367 TYPE_PRECISION
10368 (TREE_TYPE (TREE_OPERAND
10369 (arg0, 0))))
10370 && operand_equal_p (tree11, tree011, 0))
10371 return fold_convert_loc
10372 (loc, type,
10373 build2 ((code0 != LSHIFT_EXPR
10374 ? LROTATE_EXPR
10375 : RROTATE_EXPR),
10376 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10377 TREE_OPERAND (arg0, 0), tree11));
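/* For example (illustration, unsigned 32-bit X):

     (X << 3) + (X >> 29)         becomes   X lrotate 3
     (X << N) + (X >> (32 - N))   becomes   X lrotate N

   BIT_IOR_EXPR and BIT_XOR_EXPR reach the same code through the
   bit_rotate label, since the shifted-in bits are zero.  */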
10382 associate:
10383 /* In most languages, we can't associate operations on floats through
10384 parentheses. Rather than remember where the parentheses were, we
10385 don't associate floats at all, unless the user has specified
10386 -fassociative-math.
10387 And, we need to make sure type is not saturating. */
10389 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10390 && !TYPE_SATURATING (type))
10392 tree var0, con0, lit0, minus_lit0;
10393 tree var1, con1, lit1, minus_lit1;
10394 tree atype = type;
10395 bool ok = true;
10397 /* Split both trees into variables, constants, and literals. Then
10398 associate each group together, the constants with literals,
10399 then the result with variables. This increases the chances of
10400 literals being recombined later and of generating relocatable
10401 expressions for the sum of a constant and literal. */
10402 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10403 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10404 code == MINUS_EXPR);
10406 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10407 if (code == MINUS_EXPR)
10408 code = PLUS_EXPR;
10410 /* With undefined overflow prefer doing association in a type
10411 which wraps on overflow, if that is one of the operand types. */
10412 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10413 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10415 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10416 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10417 atype = TREE_TYPE (arg0);
10418 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10419 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10420 atype = TREE_TYPE (arg1);
10421 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10424 /* With undefined overflow we can only associate constants with one
10425 variable, and constants whose association doesn't overflow. */
10426 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10427 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10429 if (var0 && var1)
10431 tree tmp0 = var0;
10432 tree tmp1 = var1;
10434 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10435 tmp0 = TREE_OPERAND (tmp0, 0);
10436 if (CONVERT_EXPR_P (tmp0)
10437 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10438 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10439 <= TYPE_PRECISION (atype)))
10440 tmp0 = TREE_OPERAND (tmp0, 0);
10441 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10442 tmp1 = TREE_OPERAND (tmp1, 0);
10443 if (CONVERT_EXPR_P (tmp1)
10444 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10445 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10446 <= TYPE_PRECISION (atype)))
10447 tmp1 = TREE_OPERAND (tmp1, 0);
10448 /* The only case we can still associate with two variables
10449 is if they are the same, modulo negation and bit-pattern
10450 preserving conversions. */
10451 if (!operand_equal_p (tmp0, tmp1, 0))
10452 ok = false;
10456 /* Only do something if we found more than two objects. Otherwise,
10457 nothing has changed and we risk infinite recursion. */
10458 if (ok
10459 && (2 < ((var0 != 0) + (var1 != 0)
10460 + (con0 != 0) + (con1 != 0)
10461 + (lit0 != 0) + (lit1 != 0)
10462 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10464 bool any_overflows = false;
10465 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10466 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10467 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10468 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10469 var0 = associate_trees (loc, var0, var1, code, atype);
10470 con0 = associate_trees (loc, con0, con1, code, atype);
10471 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10472 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10473 code, atype);
10475 /* Preserve the MINUS_EXPR if the negative part of the literal is
10476 greater than the positive part. Otherwise, the multiplicative
10477 folding code (i.e. extract_muldiv) may be fooled if
10478 unsigned constants are subtracted, as in the following
10479 example: ((X*2 + 4) - 8U)/2. */
10480 if (minus_lit0 && lit0)
10482 if (TREE_CODE (lit0) == INTEGER_CST
10483 && TREE_CODE (minus_lit0) == INTEGER_CST
10484 && tree_int_cst_lt (lit0, minus_lit0))
10486 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10487 MINUS_EXPR, atype);
10488 lit0 = 0;
10490 else
10492 lit0 = associate_trees (loc, lit0, minus_lit0,
10493 MINUS_EXPR, atype);
10494 minus_lit0 = 0;
10498 /* Don't introduce overflows through reassociation. */
10499 if (!any_overflows
10500 && ((lit0 && TREE_OVERFLOW (lit0))
10501 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10502 return NULL_TREE;
10504 if (minus_lit0)
10506 if (con0 == 0)
10507 return
10508 fold_convert_loc (loc, type,
10509 associate_trees (loc, var0, minus_lit0,
10510 MINUS_EXPR, atype));
10511 else
10513 con0 = associate_trees (loc, con0, minus_lit0,
10514 MINUS_EXPR, atype);
10515 return
10516 fold_convert_loc (loc, type,
10517 associate_trees (loc, var0, con0,
10518 PLUS_EXPR, atype));
10522 con0 = associate_trees (loc, con0, lit0, code, atype);
10523 return
10524 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10525 code, atype));
10529 return NULL_TREE;
10531 case MINUS_EXPR:
10532 /* Pointer simplifications for subtraction, simple reassociations. */
10533 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10535 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10536 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10537 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10539 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10540 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10541 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10542 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10543 return fold_build2_loc (loc, PLUS_EXPR, type,
10544 fold_build2_loc (loc, MINUS_EXPR, type,
10545 arg00, arg10),
10546 fold_build2_loc (loc, MINUS_EXPR, type,
10547 arg01, arg11));
10549 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10550 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10552 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10553 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10554 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10555 fold_convert_loc (loc, type, arg1));
10556 if (tmp)
10557 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10560 /* A - (-B) -> A + B */
10561 if (TREE_CODE (arg1) == NEGATE_EXPR)
10562 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10563 fold_convert_loc (loc, type,
10564 TREE_OPERAND (arg1, 0)));
10566 /* Disable further optimizations involving UPC shared pointers,
10567 because integers are not interoperable with shared pointers.
10568 (The test below also detects pointer differences between
10569 shared pointers, which cannot be folded.) */
10571 if (TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
10572 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
10573 return NULL_TREE;
10575 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10576 if (TREE_CODE (arg0) == NEGATE_EXPR
10577 && (FLOAT_TYPE_P (type)
10578 || INTEGRAL_TYPE_P (type))
10579 && negate_expr_p (arg1)
10580 && reorder_operands_p (arg0, arg1))
10581 return fold_build2_loc (loc, MINUS_EXPR, type,
10582 fold_convert_loc (loc, type,
10583 negate_expr (arg1)),
10584 fold_convert_loc (loc, type,
10585 TREE_OPERAND (arg0, 0)));
10586 /* Convert -A - 1 to ~A. */
10587 if (INTEGRAL_TYPE_P (type)
10588 && TREE_CODE (arg0) == NEGATE_EXPR
10589 && integer_onep (arg1)
10590 && !TYPE_OVERFLOW_TRAPS (type))
10591 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10592 fold_convert_loc (loc, type,
10593 TREE_OPERAND (arg0, 0)));
10595 /* Convert -1 - A to ~A. */
10596 if (INTEGRAL_TYPE_P (type)
10597 && integer_all_onesp (arg0))
10598 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10601 /* X - (X / CST) * CST is X % CST. */
10602 if (INTEGRAL_TYPE_P (type)
10603 && TREE_CODE (arg1) == MULT_EXPR
10604 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10605 && operand_equal_p (arg0,
10606 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10607 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10608 TREE_OPERAND (arg1, 1), 0))
10609 return
10610 fold_convert_loc (loc, type,
10611 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10612 arg0, TREE_OPERAND (arg1, 1)));
10614 if (! FLOAT_TYPE_P (type))
10616 if (integer_zerop (arg0))
10617 return negate_expr (fold_convert_loc (loc, type, arg1));
10618 if (integer_zerop (arg1))
10619 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10621 /* Fold A - (A & B) into ~B & A. */
10622 if (!TREE_SIDE_EFFECTS (arg0)
10623 && TREE_CODE (arg1) == BIT_AND_EXPR)
10625 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10627 tree arg10 = fold_convert_loc (loc, type,
10628 TREE_OPERAND (arg1, 0));
10629 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10630 fold_build1_loc (loc, BIT_NOT_EXPR,
10631 type, arg10),
10632 fold_convert_loc (loc, type, arg0));
10634 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10636 tree arg11 = fold_convert_loc (loc,
10637 type, TREE_OPERAND (arg1, 1));
10638 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10639 fold_build1_loc (loc, BIT_NOT_EXPR,
10640 type, arg11),
10641 fold_convert_loc (loc, type, arg0));
10645 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10646 any power of 2 minus 1. */
10647 if (TREE_CODE (arg0) == BIT_AND_EXPR
10648 && TREE_CODE (arg1) == BIT_AND_EXPR
10649 && operand_equal_p (TREE_OPERAND (arg0, 0),
10650 TREE_OPERAND (arg1, 0), 0))
10652 tree mask0 = TREE_OPERAND (arg0, 1);
10653 tree mask1 = TREE_OPERAND (arg1, 1);
10654 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10656 if (operand_equal_p (tem, mask1, 0))
10658 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10659 TREE_OPERAND (arg0, 0), mask1);
10660 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
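	      /* For example, with B == 7 and A == 13 (binary 1101):
		 (13 & ~7) - (13 & 7) == 8 - 5 == 3, and likewise
		 (13 ^ 7) - 7 == 10 - 7 == 3.  */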
10665 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10666 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10667 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10669 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10670 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10671 (-ARG1 + ARG0) reduces to -ARG1. */
10672 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10673 return negate_expr (fold_convert_loc (loc, type, arg1));
10675 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10676 __complex__ ( x, -y ). This is not the same for SNaNs or if
10677 signed zeros are involved. */
10678 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10679 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10680 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10682 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10683 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10684 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10685 bool arg0rz = false, arg0iz = false;
10686 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10687 || (arg0i && (arg0iz = real_zerop (arg0i))))
10689 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10690 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10691 if (arg0rz && arg1i && real_zerop (arg1i))
10693 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10694 arg1r ? arg1r
10695 : build1 (REALPART_EXPR, rtype, arg1));
10696 tree ip = arg0i ? arg0i
10697 : build1 (IMAGPART_EXPR, rtype, arg0);
10698 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10700 else if (arg0iz && arg1r && real_zerop (arg1r))
10702 tree rp = arg0r ? arg0r
10703 : build1 (REALPART_EXPR, rtype, arg0);
10704 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10705 arg1i ? arg1i
10706 : build1 (IMAGPART_EXPR, rtype, arg1));
10707 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10712 /* Fold &x - &x. This can happen from &x.foo - &x.
10713 This is unsafe for certain floats even in non-IEEE formats.
10714 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10715 Also note that operand_equal_p is always false if an operand
10716 is volatile. */
10718 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10719 && operand_equal_p (arg0, arg1, 0))
10720 return build_zero_cst (type);
10722 /* A - B -> A + (-B) if B is easily negatable. */
10723 if (negate_expr_p (arg1)
10724 && ((FLOAT_TYPE_P (type)
10725 /* Avoid this transformation if B is a positive REAL_CST. */
10726 && (TREE_CODE (arg1) != REAL_CST
10727 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10728 || INTEGRAL_TYPE_P (type)))
10729 return fold_build2_loc (loc, PLUS_EXPR, type,
10730 fold_convert_loc (loc, type, arg0),
10731 fold_convert_loc (loc, type,
10732 negate_expr (arg1)));
10734 /* Try folding difference of addresses. */
10736 HOST_WIDE_INT diff;
10738 if ((TREE_CODE (arg0) == ADDR_EXPR
10739 || TREE_CODE (arg1) == ADDR_EXPR)
10740 && ptr_difference_const (arg0, arg1, &diff))
10741 return build_int_cst_type (type, diff);
10744 /* Fold &a[i] - &a[j] to i-j. */
10745 if (TREE_CODE (arg0) == ADDR_EXPR
10746 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10747 && TREE_CODE (arg1) == ADDR_EXPR
10748 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10750 tree tem = fold_addr_of_array_ref_difference (loc, type,
10751 TREE_OPERAND (arg0, 0),
10752 TREE_OPERAND (arg1, 0));
10753 if (tem)
10754 return tem;
10757 if (FLOAT_TYPE_P (type)
10758 && flag_unsafe_math_optimizations
10759 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10760 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10761 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10762 return tem;
10764 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10765 one. Make sure the type is not saturating and has the signedness of
10766 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10767 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10768 if ((TREE_CODE (arg0) == MULT_EXPR
10769 || TREE_CODE (arg1) == MULT_EXPR)
10770 && !TYPE_SATURATING (type)
10771 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10772 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10773 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10775 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10776 if (tem)
10777 return tem;
10780 goto associate;
10782 case MULT_EXPR:
10783 /* (-A) * (-B) -> A * B */
10784 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10785 return fold_build2_loc (loc, MULT_EXPR, type,
10786 fold_convert_loc (loc, type,
10787 TREE_OPERAND (arg0, 0)),
10788 fold_convert_loc (loc, type,
10789 negate_expr (arg1)));
10790 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10791 return fold_build2_loc (loc, MULT_EXPR, type,
10792 fold_convert_loc (loc, type,
10793 negate_expr (arg0)),
10794 fold_convert_loc (loc, type,
10795 TREE_OPERAND (arg1, 0)));
10797 if (! FLOAT_TYPE_P (type))
10799 if (integer_zerop (arg1))
10800 return omit_one_operand_loc (loc, type, arg1, arg0);
10801 if (integer_onep (arg1))
10802 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10803 /* Transform x * -1 into -x. Make sure to do the negation
10804 on the original operand with conversions not stripped
10805 because we can only strip non-sign-changing conversions. */
10806 if (integer_all_onesp (arg1))
10807 return fold_convert_loc (loc, type, negate_expr (op0));
10808 /* Transform x * -C into -x * C if x is easily negatable. */
10809 if (TREE_CODE (arg1) == INTEGER_CST
10810 && tree_int_cst_sgn (arg1) == -1
10811 && negate_expr_p (arg0)
10812 && (tem = negate_expr (arg1)) != arg1
10813 && !TREE_OVERFLOW (tem))
10814 return fold_build2_loc (loc, MULT_EXPR, type,
10815 fold_convert_loc (loc, type,
10816 negate_expr (arg0)),
10817 tem);
10819 /* (a * (1 << b)) is (a << b) */
10820 if (TREE_CODE (arg1) == LSHIFT_EXPR
10821 && integer_onep (TREE_OPERAND (arg1, 0)))
10822 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10823 TREE_OPERAND (arg1, 1));
10824 if (TREE_CODE (arg0) == LSHIFT_EXPR
10825 && integer_onep (TREE_OPERAND (arg0, 0)))
10826 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10827 TREE_OPERAND (arg0, 1));
10829 /* (A + A) * C -> A * 2 * C */
10830 if (TREE_CODE (arg0) == PLUS_EXPR
10831 && TREE_CODE (arg1) == INTEGER_CST
10832 && operand_equal_p (TREE_OPERAND (arg0, 0),
10833 TREE_OPERAND (arg0, 1), 0))
10834 return fold_build2_loc (loc, MULT_EXPR, type,
10835 omit_one_operand_loc (loc, type,
10836 TREE_OPERAND (arg0, 0),
10837 TREE_OPERAND (arg0, 1)),
10838 fold_build2_loc (loc, MULT_EXPR, type,
10839 build_int_cst (type, 2) , arg1));
10841 strict_overflow_p = false;
10842 if (TREE_CODE (arg1) == INTEGER_CST
10843 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10844 &strict_overflow_p)))
10846 if (strict_overflow_p)
10847 fold_overflow_warning (("assuming signed overflow does not "
10848 "occur when simplifying "
10849 "multiplication"),
10850 WARN_STRICT_OVERFLOW_MISC);
10851 return fold_convert_loc (loc, type, tem);
10854 /* Optimize z * conj(z) for integer complex numbers. */
10855 if (TREE_CODE (arg0) == CONJ_EXPR
10856 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10857 return fold_mult_zconjz (loc, type, arg1);
10858 if (TREE_CODE (arg1) == CONJ_EXPR
10859 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10860 return fold_mult_zconjz (loc, type, arg0);
10862 else
10864 /* Maybe fold x * 0 to 0. The expressions aren't the same
10865 when x is NaN, since x * 0 is also NaN. Nor are they the
10866 same in modes with signed zeros, since multiplying a
10867 negative value by 0 gives -0, not +0. */
10868 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10869 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10870 && real_zerop (arg1))
10871 return omit_one_operand_loc (loc, type, arg1, arg0);
10872 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10873 Likewise for complex arithmetic with signed zeros. */
10874 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10875 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10876 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10877 && real_onep (arg1))
10878 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10880 /* Transform x * -1.0 into -x. */
10881 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10882 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10883 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10884 && real_minus_onep (arg1))
10885 return fold_convert_loc (loc, type, negate_expr (arg0));
10887 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10888 the result for floating point types due to rounding, so it is applied
10889 only if -fassociative-math was specified. */
10890 if (flag_associative_math
10891 && TREE_CODE (arg0) == RDIV_EXPR
10892 && TREE_CODE (arg1) == REAL_CST
10893 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10895 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10896 arg1);
10897 if (tem)
10898 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10899 TREE_OPERAND (arg0, 1));
10902 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10903 if (operand_equal_p (arg0, arg1, 0))
10905 tree tem = fold_strip_sign_ops (arg0);
10906 if (tem != NULL_TREE)
10908 tem = fold_convert_loc (loc, type, tem);
10909 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10913 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10914 This is not the same for NaNs or if signed zeros are
10915 involved. */
10916 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10917 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10918 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10919 && TREE_CODE (arg1) == COMPLEX_CST
10920 && real_zerop (TREE_REALPART (arg1)))
10922 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10923 if (real_onep (TREE_IMAGPART (arg1)))
10924 return
10925 fold_build2_loc (loc, COMPLEX_EXPR, type,
10926 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10927 rtype, arg0)),
10928 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10929 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10930 return
10931 fold_build2_loc (loc, COMPLEX_EXPR, type,
10932 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10933 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10934 rtype, arg0)));
10937 /* Optimize z * conj(z) for floating point complex numbers.
10938 Guarded by flag_unsafe_math_optimizations as non-finite
10939 imaginary components don't produce scalar results. */
10940 if (flag_unsafe_math_optimizations
10941 && TREE_CODE (arg0) == CONJ_EXPR
10942 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10943 return fold_mult_zconjz (loc, type, arg1);
10944 if (flag_unsafe_math_optimizations
10945 && TREE_CODE (arg1) == CONJ_EXPR
10946 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10947 return fold_mult_zconjz (loc, type, arg0);
10949 if (flag_unsafe_math_optimizations)
10951 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10952 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10954 /* Optimizations of root(...)*root(...). */
10955 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10957 tree rootfn, arg;
10958 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10959 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10961 /* Optimize sqrt(x)*sqrt(x) as x. */
10962 if (BUILTIN_SQRT_P (fcode0)
10963 && operand_equal_p (arg00, arg10, 0)
10964 && ! HONOR_SNANS (TYPE_MODE (type)))
10965 return arg00;
10967 /* Optimize root(x)*root(y) as root(x*y). */
10968 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10969 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10970 return build_call_expr_loc (loc, rootfn, 1, arg);
10973 /* Optimize expN(x)*expN(y) as expN(x+y). */
10974 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10976 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10977 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10978 CALL_EXPR_ARG (arg0, 0),
10979 CALL_EXPR_ARG (arg1, 0));
10980 return build_call_expr_loc (loc, expfn, 1, arg);
10983 /* Optimizations of pow(...)*pow(...). */
10984 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10985 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10986 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10988 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10989 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10990 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10991 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10993 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10994 if (operand_equal_p (arg01, arg11, 0))
10996 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10997 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10998 arg00, arg10);
10999 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11002 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11003 if (operand_equal_p (arg00, arg10, 0))
11005 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11006 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11007 arg01, arg11);
11008 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11012 /* Optimize tan(x)*cos(x) as sin(x). */
11013 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11014 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11015 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11016 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11017 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11018 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11019 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11020 CALL_EXPR_ARG (arg1, 0), 0))
11022 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11024 if (sinfn != NULL_TREE)
11025 return build_call_expr_loc (loc, sinfn, 1,
11026 CALL_EXPR_ARG (arg0, 0));
11029 /* Optimize x*pow(x,c) as pow(x,c+1). */
11030 if (fcode1 == BUILT_IN_POW
11031 || fcode1 == BUILT_IN_POWF
11032 || fcode1 == BUILT_IN_POWL)
11034 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11035 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11036 if (TREE_CODE (arg11) == REAL_CST
11037 && !TREE_OVERFLOW (arg11)
11038 && operand_equal_p (arg0, arg10, 0))
11040 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11041 REAL_VALUE_TYPE c;
11042 tree arg;
11044 c = TREE_REAL_CST (arg11);
11045 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11046 arg = build_real (type, c);
11047 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11051 /* Optimize pow(x,c)*x as pow(x,c+1). */
11052 if (fcode0 == BUILT_IN_POW
11053 || fcode0 == BUILT_IN_POWF
11054 || fcode0 == BUILT_IN_POWL)
11056 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11057 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11058 if (TREE_CODE (arg01) == REAL_CST
11059 && !TREE_OVERFLOW (arg01)
11060 && operand_equal_p (arg1, arg00, 0))
11062 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11063 REAL_VALUE_TYPE c;
11064 tree arg;
11066 c = TREE_REAL_CST (arg01);
11067 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11068 arg = build_real (type, c);
11069 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11073 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11074 if (!in_gimple_form
11075 && optimize
11076 && operand_equal_p (arg0, arg1, 0))
11078 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11080 if (powfn)
11082 tree arg = build_real (type, dconst2);
11083 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11088 goto associate;
11090 case BIT_IOR_EXPR:
11091 bit_ior:
11092 if (integer_all_onesp (arg1))
11093 return omit_one_operand_loc (loc, type, arg1, arg0);
11094 if (integer_zerop (arg1))
11095 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11096 if (operand_equal_p (arg0, arg1, 0))
11097 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11099 /* ~X | X is -1. */
11100 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11101 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11103 t1 = build_zero_cst (type);
11104 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11105 return omit_one_operand_loc (loc, type, t1, arg1);
11108 /* X | ~X is -1. */
11109 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11110 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11112 t1 = build_zero_cst (type);
11113 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11114 return omit_one_operand_loc (loc, type, t1, arg0);
11117 /* Canonicalize (X & C1) | C2. */
11118 if (TREE_CODE (arg0) == BIT_AND_EXPR
11119 && TREE_CODE (arg1) == INTEGER_CST
11120 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11122 double_int c1, c2, c3, msk;
11123 int width = TYPE_PRECISION (type), w;
11124 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11125 c2 = tree_to_double_int (arg1);
11127 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11128 if ((c1 & c2) == c1)
11129 return omit_one_operand_loc (loc, type, arg1,
11130 TREE_OPERAND (arg0, 0));
11132 msk = double_int::mask (width);
11134 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11135 if (msk.and_not (c1 | c2).is_zero ())
11136 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11137 TREE_OPERAND (arg0, 0), arg1);
11139 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11140 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11141 mode which allows further optimizations. */
11142 c1 &= msk;
11143 c2 &= msk;
11144 c3 = c1.and_not (c2);
11145 for (w = BITS_PER_UNIT;
11146 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11147 w <<= 1)
11149 unsigned HOST_WIDE_INT mask
11150 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11151 if (((c1.low | c2.low) & mask) == mask
11152 && (c1.low & ~mask) == 0 && c1.high == 0)
11154 c3 = double_int::from_uhwi (mask);
11155 break;
11158 if (c3 != c1)
11159 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11160 fold_build2_loc (loc, BIT_AND_EXPR, type,
11161 TREE_OPERAND (arg0, 0),
11162 double_int_to_tree (type,
11163 c3)),
11164 arg1);
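	  /* For example, with C1 == 0x3c and C2 == 0x0c, the bits of C1
	     that C2 already sets are redundant, so (X & 0x3c) | 0x0c
	     is canonicalized to (X & 0x30) | 0x0c.  */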
11167 /* (X & Y) | Y is (X, Y). */
11168 if (TREE_CODE (arg0) == BIT_AND_EXPR
11169 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11170 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11171 /* (X & Y) | X is (Y, X). */
11172 if (TREE_CODE (arg0) == BIT_AND_EXPR
11173 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11174 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11175 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11176 /* X | (X & Y) is (Y, X). */
11177 if (TREE_CODE (arg1) == BIT_AND_EXPR
11178 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11179 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11180 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11181 /* X | (Y & X) is (Y, X). */
11182 if (TREE_CODE (arg1) == BIT_AND_EXPR
11183 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11184 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11185 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11187 /* (X & ~Y) | (~X & Y) is X ^ Y */
11188 if (TREE_CODE (arg0) == BIT_AND_EXPR
11189 && TREE_CODE (arg1) == BIT_AND_EXPR)
11191 tree a0, a1, l0, l1, n0, n1;
11193 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11194 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11196 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11197 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11199 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11200 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11202 if ((operand_equal_p (n0, a0, 0)
11203 && operand_equal_p (n1, a1, 0))
11204 || (operand_equal_p (n0, a1, 0)
11205 && operand_equal_p (n1, a0, 0)))
11206 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11209 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11210 if (t1 != NULL_TREE)
11211 return t1;
11213 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11215 This results in more efficient code for machines without a NAND
11216 instruction. Combine will canonicalize to the first form
11217 which will allow use of NAND instructions provided by the
11218 backend if they exist. */
11219 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11220 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11222 return
11223 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11224 build2 (BIT_AND_EXPR, type,
11225 fold_convert_loc (loc, type,
11226 TREE_OPERAND (arg0, 0)),
11227 fold_convert_loc (loc, type,
11228 TREE_OPERAND (arg1, 0))));
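      /* This is one of De Morgan's laws: in C notation,
	 ~a | ~b == ~(a & b).  */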
11231 /* See if this can be simplified into a rotate first. If that
11232 is unsuccessful continue in the association code. */
11233 goto bit_rotate;
11235 case BIT_XOR_EXPR:
11236 if (integer_zerop (arg1))
11237 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11238 if (integer_all_onesp (arg1))
11239 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11240 if (operand_equal_p (arg0, arg1, 0))
11241 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11243 /* ~X ^ X is -1. */
11244 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11245 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11247 t1 = build_zero_cst (type);
11248 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11249 return omit_one_operand_loc (loc, type, t1, arg1);
11252 /* X ^ ~X is -1. */
11253 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11254 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11256 t1 = build_zero_cst (type);
11257 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11258 return omit_one_operand_loc (loc, type, t1, arg0);
11261 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11262 with a constant, and the two constants have no bits in common,
11263 we should treat this as a BIT_IOR_EXPR since this may produce more
11264 simplifications. */
11265 if (TREE_CODE (arg0) == BIT_AND_EXPR
11266 && TREE_CODE (arg1) == BIT_AND_EXPR
11267 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11268 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11269 && integer_zerop (const_binop (BIT_AND_EXPR,
11270 TREE_OPERAND (arg0, 1),
11271 TREE_OPERAND (arg1, 1))))
11273 code = BIT_IOR_EXPR;
11274 goto bit_ior;
11277 /* (X | Y) ^ X -> Y & ~X. */
11278 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11279 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11281 tree t2 = TREE_OPERAND (arg0, 1);
11282 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11283 arg1);
11284 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11285 fold_convert_loc (loc, type, t2),
11286 fold_convert_loc (loc, type, t1));
11287 return t1;
11290 /* (Y | X) ^ X -> Y & ~X. */
11291 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11292 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11294 tree t2 = TREE_OPERAND (arg0, 0);
11295 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11296 arg1);
11297 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11298 fold_convert_loc (loc, type, t2),
11299 fold_convert_loc (loc, type, t1));
11300 return t1;
11303 /* X ^ (X | Y) -> Y & ~X. */
11304 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11305 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11307 tree t2 = TREE_OPERAND (arg1, 1);
11308 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11309 arg0);
11310 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11311 fold_convert_loc (loc, type, t2),
11312 fold_convert_loc (loc, type, t1));
11313 return t1;
11316 /* X ^ (Y | X) -> Y & ~X. */
11317 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11318 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11320 tree t2 = TREE_OPERAND (arg1, 0);
11321 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11322 arg0);
11323 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11324 fold_convert_loc (loc, type, t2),
11325 fold_convert_loc (loc, type, t1));
11326 return t1;
11329 /* Convert ~X ^ ~Y to X ^ Y. */
11330 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11331 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11332 return fold_build2_loc (loc, code, type,
11333 fold_convert_loc (loc, type,
11334 TREE_OPERAND (arg0, 0)),
11335 fold_convert_loc (loc, type,
11336 TREE_OPERAND (arg1, 0)));
11338 /* Convert ~X ^ C to X ^ ~C. */
11339 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11340 && TREE_CODE (arg1) == INTEGER_CST)
11341 return fold_build2_loc (loc, code, type,
11342 fold_convert_loc (loc, type,
11343 TREE_OPERAND (arg0, 0)),
11344 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11346 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11347 if (TREE_CODE (arg0) == BIT_AND_EXPR
11348 && integer_onep (TREE_OPERAND (arg0, 1))
11349 && integer_onep (arg1))
11350 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11351 build_zero_cst (TREE_TYPE (arg0)));
11353 /* Fold (X & Y) ^ Y as ~X & Y. */
11354 if (TREE_CODE (arg0) == BIT_AND_EXPR
11355 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11357 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11358 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11359 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11360 fold_convert_loc (loc, type, arg1));
11362 /* Fold (X & Y) ^ X as ~Y & X. */
11363 if (TREE_CODE (arg0) == BIT_AND_EXPR
11364 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11365 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11367 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11368 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11369 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11370 fold_convert_loc (loc, type, arg1));
11372 /* Fold X ^ (X & Y) as X & ~Y. */
11373 if (TREE_CODE (arg1) == BIT_AND_EXPR
11374 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11376 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11377 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11378 fold_convert_loc (loc, type, arg0),
11379 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11381 /* Fold X ^ (Y & X) as ~Y & X. */
11382 if (TREE_CODE (arg1) == BIT_AND_EXPR
11383 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11384 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11386 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11387 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11388 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11389 fold_convert_loc (loc, type, arg0));
11392 /* See if this can be simplified into a rotate first. If that
11393 is unsuccessful continue in the association code. */
11394 goto bit_rotate;
11396 case BIT_AND_EXPR:
11397 if (integer_all_onesp (arg1))
11398 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11399 if (integer_zerop (arg1))
11400 return omit_one_operand_loc (loc, type, arg1, arg0);
11401 if (operand_equal_p (arg0, arg1, 0))
11402 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11404 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11405 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11406 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11407 || (TREE_CODE (arg0) == EQ_EXPR
11408 && integer_zerop (TREE_OPERAND (arg0, 1))))
11409 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11410 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11412 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11413 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11414 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11415 || (TREE_CODE (arg1) == EQ_EXPR
11416 && integer_zerop (TREE_OPERAND (arg1, 1))))
11417 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11418 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11420 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11421 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11422 && TREE_CODE (arg1) == INTEGER_CST
11423 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11425 tree tmp1 = fold_convert_loc (loc, type, arg1);
11426 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11427 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11428 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11429 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11430 return
11431 fold_convert_loc (loc, type,
11432 fold_build2_loc (loc, BIT_IOR_EXPR,
11433 type, tmp2, tmp3));
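	  /* AND distributes over OR here: for example,
	     (X | 0x0f) & 0x3c == (X & 0x3c) | (0x0f & 0x3c)
	     == (X & 0x3c) | 0x0c.  */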
11436 /* (X | Y) & Y is (X, Y). */
11437 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11438 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11439 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11440 /* (X | Y) & X is (Y, X). */
11441 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11442 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11443 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11444 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11445 /* X & (X | Y) is (Y, X). */
11446 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11447 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11448 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11449 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11450 /* X & (Y | X) is (Y, X). */
11451 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11452 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11453 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11454 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11456 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11457 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11458 && integer_onep (TREE_OPERAND (arg0, 1))
11459 && integer_onep (arg1))
11461 tree tem2;
11462 tem = TREE_OPERAND (arg0, 0);
11463 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11464 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11465 tem, tem2);
11466 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11467 build_zero_cst (TREE_TYPE (tem)));
11469 /* Fold ~X & 1 as (X & 1) == 0. */
11470 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11471 && integer_onep (arg1))
11473 tree tem2;
11474 tem = TREE_OPERAND (arg0, 0);
11475 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11476 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11477 tem, tem2);
11478 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11479 build_zero_cst (TREE_TYPE (tem)));
11481 /* Fold !X & 1 as X == 0. */
11482 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11483 && integer_onep (arg1))
11485 tem = TREE_OPERAND (arg0, 0);
11486 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11487 build_zero_cst (TREE_TYPE (tem)));
11490 /* Fold (X ^ Y) & Y as ~X & Y. */
11491 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11492 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11494 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11495 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11496 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11497 fold_convert_loc (loc, type, arg1));
11499 /* Fold (X ^ Y) & X as ~Y & X. */
11500 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11501 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11502 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11504 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11505 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11506 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11507 fold_convert_loc (loc, type, arg1));
11509 /* Fold X & (X ^ Y) as X & ~Y. */
11510 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11511 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11513 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11514 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11515 fold_convert_loc (loc, type, arg0),
11516 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11518 /* Fold X & (Y ^ X) as ~Y & X. */
11519 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11520 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11521 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11523 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11524 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11525 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11526 fold_convert_loc (loc, type, arg0));
11529 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11530 multiple of 1 << CST. */
11531 if (TREE_CODE (arg1) == INTEGER_CST)
11533 double_int cst1 = tree_to_double_int (arg1);
11534 double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11535 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11536 if ((cst1 & ncst1) == ncst1
11537 && multiple_of_p (type, arg0,
11538 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11539 return fold_convert_loc (loc, type, arg0);
11542 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11543 bits from CST2. */
11544 if (TREE_CODE (arg1) == INTEGER_CST
11545 && TREE_CODE (arg0) == MULT_EXPR
11546 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11548 int arg1tz
11549 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11550 if (arg1tz > 0)
11552 double_int arg1mask, masked;
11553 arg1mask = ~double_int::mask (arg1tz);
11554 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11555 TYPE_UNSIGNED (type));
11556 masked = arg1mask & tree_to_double_int (arg1);
11557 if (masked.is_zero ())
11558 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11559 arg0, arg1);
11560 else if (masked != tree_to_double_int (arg1))
11561 return fold_build2_loc (loc, code, type, op0,
11562 double_int_to_tree (type, masked));
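	      /* For example, X * 4 always has its two low bits clear,
		 so (X * 4) & 3 folds to 0, while (X * 4) & 7 drops the
		 known-zero bits and becomes (X * 4) & 4.  */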
11566 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11567 ((A & N) + B) & M -> (A + B) & M
11568 Similarly if (N & M) == 0,
11569 ((A | N) + B) & M -> (A + B) & M
11570 and for - instead of + (or unary - instead of +)
11571 and/or ^ instead of |.
11572 If B is constant and (B & M) == 0, fold into A & M. */
11573 if (host_integerp (arg1, 1))
11575 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11576 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11577 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11578 && (TREE_CODE (arg0) == PLUS_EXPR
11579 || TREE_CODE (arg0) == MINUS_EXPR
11580 || TREE_CODE (arg0) == NEGATE_EXPR)
11581 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11582 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11584 tree pmop[2];
11585 int which = 0;
11586 unsigned HOST_WIDE_INT cst0;
11588 /* Now we know that arg0 is (C + D) or (C - D) or
11589 -C and arg1 (M) is == (1LL << cst) - 1.
11590 Store C into PMOP[0] and D into PMOP[1]. */
11591 pmop[0] = TREE_OPERAND (arg0, 0);
11592 pmop[1] = NULL;
11593 if (TREE_CODE (arg0) != NEGATE_EXPR)
11595 pmop[1] = TREE_OPERAND (arg0, 1);
11596 which = 1;
11599 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11600 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11601 & cst1) != cst1)
11602 which = -1;
11604 for (; which >= 0; which--)
11605 switch (TREE_CODE (pmop[which]))
11607 case BIT_AND_EXPR:
11608 case BIT_IOR_EXPR:
11609 case BIT_XOR_EXPR:
11610 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11611 != INTEGER_CST)
11612 break;
11613 /* tree_low_cst not used, because we don't care about
11614 the upper bits. */
11615 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11616 cst0 &= cst1;
11617 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11619 if (cst0 != cst1)
11620 break;
11622 else if (cst0 != 0)
11623 break;
11624 /* If C or D is of the form (A & N) where
11625 (N & M) == M, or of the form (A | N) or
11626 (A ^ N) where (N & M) == 0, replace it with A. */
11627 pmop[which] = TREE_OPERAND (pmop[which], 0);
11628 break;
11629 case INTEGER_CST:
11630 /* If C or D is a N where (N & M) == 0, it can be
11631 omitted (assumed 0). */
11632 if ((TREE_CODE (arg0) == PLUS_EXPR
11633 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11634 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11635 pmop[which] = NULL;
11636 break;
11637 default:
11638 break;
11641 /* Only build anything new if we optimized one or both arguments
11642 above. */
11643 if (pmop[0] != TREE_OPERAND (arg0, 0)
11644 || (TREE_CODE (arg0) != NEGATE_EXPR
11645 && pmop[1] != TREE_OPERAND (arg0, 1)))
11647 tree utype = TREE_TYPE (arg0);
11648 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11650 /* Perform the operations in a type that has defined
11651 overflow behavior. */
11652 utype = unsigned_type_for (TREE_TYPE (arg0));
11653 if (pmop[0] != NULL)
11654 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11655 if (pmop[1] != NULL)
11656 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11659 if (TREE_CODE (arg0) == NEGATE_EXPR)
11660 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11661 else if (TREE_CODE (arg0) == PLUS_EXPR)
11663 if (pmop[0] != NULL && pmop[1] != NULL)
11664 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11665 pmop[0], pmop[1]);
11666 else if (pmop[0] != NULL)
11667 tem = pmop[0];
11668 else if (pmop[1] != NULL)
11669 tem = pmop[1];
11670 else
11671 return build_int_cst (type, 0);
11673 else if (pmop[0] == NULL)
11674 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11675 else
11676 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11677 pmop[0], pmop[1]);
11678 /* TEM is now the new binary +, - or unary - replacement. */
11679 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11680 fold_convert_loc (loc, utype, arg1));
11681 return fold_convert_loc (loc, type, tem);
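	      /* For example, with M == 0xff and N == 0x1ff (so N & M == M),
		 ((A & 0x1ff) + B) & 0xff folds to (A + B) & 0xff: the bits
		 of A masked off by N cannot affect the low eight bits of
		 the sum.  */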
11686 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11687 if (t1 != NULL_TREE)
11688 return t1;
11689 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11690 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11691 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11693 unsigned int prec
11694 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11696 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11697 && (~TREE_INT_CST_LOW (arg1)
11698 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11699 return
11700 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11703 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11705 This results in more efficient code for machines without a NOR
11706 instruction. Combine will canonicalize to the first form
11707 which will allow use of NOR instructions provided by the
11708 backend if they exist. */
11709 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11710 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11712 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11713 build2 (BIT_IOR_EXPR, type,
11714 fold_convert_loc (loc, type,
11715 TREE_OPERAND (arg0, 0)),
11716 fold_convert_loc (loc, type,
11717 TREE_OPERAND (arg1, 0))));
11720 /* If arg0 is derived from the address of an object or function, we may
11721 be able to fold this expression using the object or function's
11722 alignment. */
11723 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11725 unsigned HOST_WIDE_INT modulus, residue;
11726 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11728 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11729 integer_onep (arg1));
11731 /* This works because modulus is a power of 2. If this weren't the
11732 case, we'd have to replace it by its greatest power-of-2
11733 divisor: modulus & -modulus. */
11734 if (low < modulus)
11735 return build_int_cst (type, residue & low);
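	  /* For example, if ARG0 is the address of an object known to be
	     8-byte aligned (modulus 8, residue 0), then that address
	     ANDed with 7 folds to the constant 0.  */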
11738 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11739 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11740 if the new mask might be further optimized. */
11741 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11742 || TREE_CODE (arg0) == RSHIFT_EXPR)
11743 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11744 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11745 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11746 < TYPE_PRECISION (TREE_TYPE (arg0))
11747 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11748 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11750 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11751 unsigned HOST_WIDE_INT mask
11752 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11753 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11754 tree shift_type = TREE_TYPE (arg0);
11756 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11757 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11758 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11759 && TYPE_PRECISION (TREE_TYPE (arg0))
11760 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11762 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11763 tree arg00 = TREE_OPERAND (arg0, 0);
11764 /* See if more bits can be proven as zero because of
11765 zero extension. */
11766 if (TREE_CODE (arg00) == NOP_EXPR
11767 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11769 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11770 if (TYPE_PRECISION (inner_type)
11771 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11772 && TYPE_PRECISION (inner_type) < prec)
11774 prec = TYPE_PRECISION (inner_type);
11775 /* See if we can shorten the right shift. */
11776 if (shiftc < prec)
11777 shift_type = inner_type;
11780 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11781 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11782 zerobits <<= prec - shiftc;
11783 /* For an arithmetic shift, if the sign bit could be set, zerobits
11784 can actually contain sign bits, so no transformation is
11785 possible unless MASK masks them all away. In that
11786 case the shift needs to be converted into a logical shift. */
11787 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11788 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11790 if ((mask & zerobits) == 0)
11791 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11792 else
11793 zerobits = 0;
11797 /* ((X << 16) & 0xff00) is (X, 0). */
11798 if ((mask & zerobits) == mask)
11799 return omit_one_operand_loc (loc, type,
11800 build_int_cst (type, 0), arg0);
11802 newmask = mask | zerobits;
11803 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11805 unsigned int prec;
11807 /* Only do the transformation if NEWMASK is some integer
11808 mode's mask. */
11809 for (prec = BITS_PER_UNIT;
11810 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11811 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11812 break;
11813 if (prec < HOST_BITS_PER_WIDE_INT
11814 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11816 tree newmaskt;
11818 if (shift_type != TREE_TYPE (arg0))
11820 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11821 fold_convert_loc (loc, shift_type,
11822 TREE_OPERAND (arg0, 0)),
11823 TREE_OPERAND (arg0, 1));
11824 tem = fold_convert_loc (loc, type, tem);
11826 else
11827 tem = op0;
11828 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11829 if (!tree_int_cst_equal (newmaskt, arg1))
11830 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11835 goto associate;
11837 case RDIV_EXPR:
11838 /* Don't touch a floating-point divide by zero unless the mode
11839 of the constant can represent infinity. */
11840 if (TREE_CODE (arg1) == REAL_CST
11841 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11842 && real_zerop (arg1))
11843 return NULL_TREE;
11845 /* Optimize A / A to 1.0 if we don't care about
11846 NaNs or Infinities. Skip the transformation
11847 for non-real operands. */
11848 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11849 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11850 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11851 && operand_equal_p (arg0, arg1, 0))
11853 tree r = build_real (TREE_TYPE (arg0), dconst1);
11855 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11858 /* The complex version of the above A / A optimization. */
11859 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11860 && operand_equal_p (arg0, arg1, 0))
11862 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11863 if (! HONOR_NANS (TYPE_MODE (elem_type))
11864 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11866 tree r = build_real (elem_type, dconst1);
11867 /* omit_two_operands will call fold_convert for us. */
11868 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11872 /* (-A) / (-B) -> A / B */
11873 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11874 return fold_build2_loc (loc, RDIV_EXPR, type,
11875 TREE_OPERAND (arg0, 0),
11876 negate_expr (arg1));
11877 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11878 return fold_build2_loc (loc, RDIV_EXPR, type,
11879 negate_expr (arg0),
11880 TREE_OPERAND (arg1, 0));
11882 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11883 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11884 && real_onep (arg1))
11885 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11887 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11888 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11889 && real_minus_onep (arg1))
11890 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11891 negate_expr (arg0)));
11893 /* If ARG1 is a constant, we can convert this to a multiply by the
11894 reciprocal. This does not have the same rounding properties,
11895 so only do this if -freciprocal-math. We can actually
11896 always safely do it if ARG1 is a power of two, but it's hard to
11897 tell if it is or not in a portable manner. */
11898 if (optimize
11899 && (TREE_CODE (arg1) == REAL_CST
11900 || (TREE_CODE (arg1) == COMPLEX_CST
11901 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11902 || (TREE_CODE (arg1) == VECTOR_CST
11903 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11905 if (flag_reciprocal_math
11906 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11907 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11908 /* Find the reciprocal if optimizing and the result is exact.
11909 TODO: Complex reciprocal not implemented. */
11910 if (TREE_CODE (arg1) != COMPLEX_CST)
11912 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11914 if (inverse)
11915 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11918 /* Convert A/B/C to A/(B*C). */
11919 if (flag_reciprocal_math
11920 && TREE_CODE (arg0) == RDIV_EXPR)
11921 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11922 fold_build2_loc (loc, MULT_EXPR, type,
11923 TREE_OPERAND (arg0, 1), arg1));
11925 /* Convert A/(B/C) to (A/B)*C. */
11926 if (flag_reciprocal_math
11927 && TREE_CODE (arg1) == RDIV_EXPR)
11928 return fold_build2_loc (loc, MULT_EXPR, type,
11929 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11930 TREE_OPERAND (arg1, 0)),
11931 TREE_OPERAND (arg1, 1));
11933 /* Convert C1/(X*C2) into (C1/C2)/X. */
11934 if (flag_reciprocal_math
11935 && TREE_CODE (arg1) == MULT_EXPR
11936 && TREE_CODE (arg0) == REAL_CST
11937 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11939 tree tem = const_binop (RDIV_EXPR, arg0,
11940 TREE_OPERAND (arg1, 1));
11941 if (tem)
11942 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11943 TREE_OPERAND (arg1, 0));
11946 if (flag_unsafe_math_optimizations)
11948 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11949 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11951 /* Optimize sin(x)/cos(x) as tan(x). */
11952 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11953 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11954 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11955 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11956 CALL_EXPR_ARG (arg1, 0), 0))
11958 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11960 if (tanfn != NULL_TREE)
11961 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11964 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11965 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11966 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11967 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11968 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11969 CALL_EXPR_ARG (arg1, 0), 0))
11971 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11973 if (tanfn != NULL_TREE)
11975 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11976 CALL_EXPR_ARG (arg0, 0));
11977 return fold_build2_loc (loc, RDIV_EXPR, type,
11978 build_real (type, dconst1), tmp);
11982 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11983 NaNs or Infinities. */
11984 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11985 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11986 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11988 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11989 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11991 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11992 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11993 && operand_equal_p (arg00, arg01, 0))
11995 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11997 if (cosfn != NULL_TREE)
11998 return build_call_expr_loc (loc, cosfn, 1, arg00);
12002 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12003 NaNs or Infinities. */
12004 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12005 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12006 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12008 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12009 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12011 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12012 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12013 && operand_equal_p (arg00, arg01, 0))
12015 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12017 if (cosfn != NULL_TREE)
12019 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12020 return fold_build2_loc (loc, RDIV_EXPR, type,
12021 build_real (type, dconst1),
12022 tmp);
12027 /* Optimize pow(x,c)/x as pow(x,c-1). */
12028 if (fcode0 == BUILT_IN_POW
12029 || fcode0 == BUILT_IN_POWF
12030 || fcode0 == BUILT_IN_POWL)
12032 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12033 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12034 if (TREE_CODE (arg01) == REAL_CST
12035 && !TREE_OVERFLOW (arg01)
12036 && operand_equal_p (arg1, arg00, 0))
12038 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12039 REAL_VALUE_TYPE c;
12040 tree arg;
12042 c = TREE_REAL_CST (arg01);
12043 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12044 arg = build_real (type, c);
12045 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12049 /* Optimize a/root(b/c) into a*root(c/b). */
12050 if (BUILTIN_ROOT_P (fcode1))
12052 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12054 if (TREE_CODE (rootarg) == RDIV_EXPR)
12056 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12057 tree b = TREE_OPERAND (rootarg, 0);
12058 tree c = TREE_OPERAND (rootarg, 1);
12060 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12062 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12063 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12067 /* Optimize x/expN(y) into x*expN(-y). */
12068 if (BUILTIN_EXPONENT_P (fcode1))
12070 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12071 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12072 arg1 = build_call_expr_loc (loc,
12073 expfn, 1,
12074 fold_convert_loc (loc, type, arg));
12075 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12078 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12079 if (fcode1 == BUILT_IN_POW
12080 || fcode1 == BUILT_IN_POWF
12081 || fcode1 == BUILT_IN_POWL)
12083 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12084 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12085 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12086 tree neg11 = fold_convert_loc (loc, type,
12087 negate_expr (arg11));
12088 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12089 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12092 return NULL_TREE;
12094 case TRUNC_DIV_EXPR:
12095 /* Optimize (X & (-A)) / A where A is a power of 2,
12096 to X >> log2(A) */
12097 if (TREE_CODE (arg0) == BIT_AND_EXPR
12098 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12099 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12101 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12102 arg1, TREE_OPERAND (arg0, 1));
12103 if (sum && integer_zerop (sum)) {
12104 unsigned long pow2;
12106 if (TREE_INT_CST_LOW (arg1))
12107 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12108 else
12109 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12110 + HOST_BITS_PER_WIDE_INT;
12112 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12113 TREE_OPERAND (arg0, 0),
12114 build_int_cst (integer_type_node, pow2));
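	  /* For example, (X & -8) / 8 becomes X >> 3 for signed X: the
	     AND clears the three low bits, so the truncating division is
	     exact and an arithmetic shift computes it; e.g. with X == -13,
	     (-13 & -8) / 8 == -16 / 8 == -2 == -13 >> 3.  */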
12118 /* Fall through */
12120 case FLOOR_DIV_EXPR:
12121 /* Simplify A / (B << N) where A and B are positive and B is
12122 a power of 2, to A >> (N + log2(B)). */
12123 strict_overflow_p = false;
12124 if (TREE_CODE (arg1) == LSHIFT_EXPR
12125 && (TYPE_UNSIGNED (type)
12126 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12128 tree sval = TREE_OPERAND (arg1, 0);
12129 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12131 tree sh_cnt = TREE_OPERAND (arg1, 1);
12132 unsigned long pow2;
12134 if (TREE_INT_CST_LOW (sval))
12135 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12136 else
12137 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12138 + HOST_BITS_PER_WIDE_INT;
12140 if (strict_overflow_p)
12141 fold_overflow_warning (("assuming signed overflow does not "
12142 "occur when simplifying A / (B << N)"),
12143 WARN_STRICT_OVERFLOW_MISC);
12145 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12146 sh_cnt,
12147 build_int_cst (TREE_TYPE (sh_cnt),
12148 pow2));
12149 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12150 fold_convert_loc (loc, type, arg0), sh_cnt);
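	  /* For example, for unsigned A, A / (4 << N) becomes
	     A >> (N + 2), since log2 (4) == 2.  */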
12154 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12155 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12156 if (INTEGRAL_TYPE_P (type)
12157 && TYPE_UNSIGNED (type)
12158 && code == FLOOR_DIV_EXPR)
12159 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12161 /* Fall through */
12163 case ROUND_DIV_EXPR:
12164 case CEIL_DIV_EXPR:
12165 case EXACT_DIV_EXPR:
12166 if (integer_onep (arg1))
12167 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12168 if (integer_zerop (arg1))
12169 return NULL_TREE;
12170 /* X / -1 is -X. */
12171 if (!TYPE_UNSIGNED (type)
12172 && TREE_CODE (arg1) == INTEGER_CST
12173 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12174 && TREE_INT_CST_HIGH (arg1) == -1)
12175 return fold_convert_loc (loc, type, negate_expr (arg0));
12177 /* Convert -A / -B to A / B when the type is signed and overflow is
12178 undefined. */
12179 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12180 && TREE_CODE (arg0) == NEGATE_EXPR
12181 && negate_expr_p (arg1))
12183 if (INTEGRAL_TYPE_P (type))
12184 fold_overflow_warning (("assuming signed overflow does not occur "
12185 "when distributing negation across "
12186 "division"),
12187 WARN_STRICT_OVERFLOW_MISC);
12188 return fold_build2_loc (loc, code, type,
12189 fold_convert_loc (loc, type,
12190 TREE_OPERAND (arg0, 0)),
12191 fold_convert_loc (loc, type,
12192 negate_expr (arg1)));
12194 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12195 && TREE_CODE (arg1) == NEGATE_EXPR
12196 && negate_expr_p (arg0))
12198 if (INTEGRAL_TYPE_P (type))
12199 fold_overflow_warning (("assuming signed overflow does not occur "
12200 "when distributing negation across "
12201 "division"),
12202 WARN_STRICT_OVERFLOW_MISC);
12203 return fold_build2_loc (loc, code, type,
12204 fold_convert_loc (loc, type,
12205 negate_expr (arg0)),
12206 fold_convert_loc (loc, type,
12207 TREE_OPERAND (arg1, 0)));
12210 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12211 operation, EXACT_DIV_EXPR.
12213 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12214 At one time others generated faster code; it's not clear if they do
12215 after the last round of changes to the DIV code in expmed.c. */
12216 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12217 && multiple_of_p (type, arg0, arg1))
12218 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12220 strict_overflow_p = false;
12221 if (TREE_CODE (arg1) == INTEGER_CST
12222 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12223 &strict_overflow_p)))
12225 if (strict_overflow_p)
12226 fold_overflow_warning (("assuming signed overflow does not occur "
12227 "when simplifying division"),
12228 WARN_STRICT_OVERFLOW_MISC);
12229 return fold_convert_loc (loc, type, tem);
12232 return NULL_TREE;
12234 case CEIL_MOD_EXPR:
12235 case FLOOR_MOD_EXPR:
12236 case ROUND_MOD_EXPR:
12237 case TRUNC_MOD_EXPR:
12238 /* X % 1 is always zero, but be sure to preserve any side
12239 effects in X. */
12240 if (integer_onep (arg1))
12241 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12243 /* For X % 0, return the expression unchanged so that we get the
12244 proper warnings and errors. */
12245 if (integer_zerop (arg1))
12246 return NULL_TREE;
12248 /* 0 % X is always zero, but be sure to preserve any side
12249 effects in X. Place this after checking for X == 0. */
12250 if (integer_zerop (arg0))
12251 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12253 /* X % -1 is zero. */
12254 if (!TYPE_UNSIGNED (type)
12255 && TREE_CODE (arg1) == INTEGER_CST
12256 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12257 && TREE_INT_CST_HIGH (arg1) == -1)
12258 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12260 /* X % -C is the same as X % C. */
12261 if (code == TRUNC_MOD_EXPR
12262 && !TYPE_UNSIGNED (type)
12263 && TREE_CODE (arg1) == INTEGER_CST
12264 && !TREE_OVERFLOW (arg1)
12265 && TREE_INT_CST_HIGH (arg1) < 0
12266 && !TYPE_OVERFLOW_TRAPS (type)
12267 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12268 && !sign_bit_p (arg1, arg1))
12269 return fold_build2_loc (loc, code, type,
12270 fold_convert_loc (loc, type, arg0),
12271 fold_convert_loc (loc, type,
12272 negate_expr (arg1)));
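/* Example: for signed x, x % -16 folds to x % 16; the sign_bit_p
   guard above rejects C == INT_MIN, the one value equal to its own
   negation.  */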
12274 /* X % -Y is the same as X % Y. */
12275 if (code == TRUNC_MOD_EXPR
12276 && !TYPE_UNSIGNED (type)
12277 && TREE_CODE (arg1) == NEGATE_EXPR
12278 && !TYPE_OVERFLOW_TRAPS (type))
12279 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12280 fold_convert_loc (loc, type,
12281 TREE_OPERAND (arg1, 0)));
12283 strict_overflow_p = false;
12284 if (TREE_CODE (arg1) == INTEGER_CST
12285 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12286 &strict_overflow_p)))
12288 if (strict_overflow_p)
12289 fold_overflow_warning (("assuming signed overflow does not occur "
12290 "when simplifying modulus"),
12291 WARN_STRICT_OVERFLOW_MISC);
12292 return fold_convert_loc (loc, type, tem);
12295 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12296 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12297 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12298 && (TYPE_UNSIGNED (type)
12299 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12301 tree c = arg1;
12302 /* Also optimize A % (C << N) where C is a power of 2,
12303 to A & ((C << N) - 1). */
12304 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12305 c = TREE_OPERAND (arg1, 0);
12307 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12309 tree mask
12310 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12311 build_int_cst (TREE_TYPE (arg1), 1));
12312 if (strict_overflow_p)
12313 fold_overflow_warning (("assuming signed overflow does not "
12314 "occur when simplifying "
12315 "X % (power of two)"),
12316 WARN_STRICT_OVERFLOW_MISC);
12317 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12318 fold_convert_loc (loc, type, arg0),
12319 fold_convert_loc (loc, type, mask));
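/* Worked examples: unsigned x % 8 folds to x & 7, and
   x % (4u << n) folds to x & ((4u << n) - 1).  */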
12323 return NULL_TREE;
12325 case LROTATE_EXPR:
12326 case RROTATE_EXPR:
12327 if (integer_all_onesp (arg0))
12328 return omit_one_operand_loc (loc, type, arg0, arg1);
12329 goto shift;
12331 case RSHIFT_EXPR:
12332 /* Optimize -1 >> x for arithmetic right shifts. */
12333 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12334 && tree_expr_nonnegative_p (arg1))
12335 return omit_one_operand_loc (loc, type, arg0, arg1);
12336 /* ... fall through ... */
12338 case LSHIFT_EXPR:
12339 shift:
12340 if (integer_zerop (arg1))
12341 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12342 if (integer_zerop (arg0))
12343 return omit_one_operand_loc (loc, type, arg0, arg1);
12345 /* Since negative shift count is not well-defined,
12346 don't try to compute it in the compiler. */
12347 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12348 return NULL_TREE;
12350 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12351 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12352 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12353 && host_integerp (TREE_OPERAND (arg0, 1), false)
12354 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12356 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12357 + TREE_INT_CST_LOW (arg1));
12359 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12360 being well defined. */
12361 if (low >= TYPE_PRECISION (type))
12363 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12364 low = low % TYPE_PRECISION (type);
12365 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12366 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12367 TREE_OPERAND (arg0, 0));
12368 else
12369 low = TYPE_PRECISION (type) - 1;
12372 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12373 build_int_cst (type, low));
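/* Example: (x >> 3) >> 2 folds to x >> 5.  When the combined count
   reaches the precision, rotates reduce it modulo the precision,
   logical shifts fold to zero, and arithmetic right shifts clamp
   the count to precision - 1.  */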
12376 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12377 into x & ((unsigned)-1 >> c) for unsigned types. */
12378 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12379 || (TYPE_UNSIGNED (type)
12380 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12381 && host_integerp (arg1, false)
12382 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12383 && host_integerp (TREE_OPERAND (arg0, 1), false)
12384 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12386 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12387 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12388 tree lshift;
12389 tree arg00;
12391 if (low0 == low1)
12393 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12395 lshift = build_int_cst (type, -1);
12396 lshift = int_const_binop (code, lshift, arg1);
12398 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
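/* Example: for 32-bit unsigned x, (x >> 4) << 4 folds to
   x & 0xfffffff0, clearing the low four bits with a single AND.  */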
12402 /* Rewrite an LROTATE_EXPR by a constant into an
12403 RROTATE_EXPR by a new constant. */
12404 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12406 tree tem = build_int_cst (TREE_TYPE (arg1),
12407 TYPE_PRECISION (type));
12408 tem = const_binop (MINUS_EXPR, tem, arg1);
12409 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
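/* Example: in a 32-bit type, a rotate-left by 5 becomes a
   rotate-right by 27, so later code only has to handle one rotate
   direction.  */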
12412 /* If we have a rotate of a bit operation with the rotate count and
12413 the second operand of the bit operation both constant,
12414 permute the two operations. */
12415 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12416 && (TREE_CODE (arg0) == BIT_AND_EXPR
12417 || TREE_CODE (arg0) == BIT_IOR_EXPR
12418 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12419 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12420 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12421 fold_build2_loc (loc, code, type,
12422 TREE_OPERAND (arg0, 0), arg1),
12423 fold_build2_loc (loc, code, type,
12424 TREE_OPERAND (arg0, 1), arg1));
12426 /* Two consecutive rotates adding up to the precision of the
12427 type can be ignored. */
12428 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12429 && TREE_CODE (arg0) == RROTATE_EXPR
12430 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12431 && TREE_INT_CST_HIGH (arg1) == 0
12432 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12433 && ((TREE_INT_CST_LOW (arg1)
12434 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12435 == (unsigned int) TYPE_PRECISION (type)))
12436 return TREE_OPERAND (arg0, 0);
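/* Example: in a 32-bit type, rotating right by 13 and then by 19
   is the identity, so the innermost operand is returned as is.  */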
12438 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12439 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12440 if the latter can be further optimized. */
12441 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12442 && TREE_CODE (arg0) == BIT_AND_EXPR
12443 && TREE_CODE (arg1) == INTEGER_CST
12444 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12446 tree mask = fold_build2_loc (loc, code, type,
12447 fold_convert_loc (loc, type,
12448 TREE_OPERAND (arg0, 1)),
12449 arg1);
12450 tree shift = fold_build2_loc (loc, code, type,
12451 fold_convert_loc (loc, type,
12452 TREE_OPERAND (arg0, 0)),
12453 arg1);
12454 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12455 if (tem)
12456 return tem;
12459 return NULL_TREE;
12461 case MIN_EXPR:
12462 if (operand_equal_p (arg0, arg1, 0))
12463 return omit_one_operand_loc (loc, type, arg0, arg1);
12464 if (INTEGRAL_TYPE_P (type)
12465 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12466 return omit_one_operand_loc (loc, type, arg1, arg0);
12467 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12468 if (tem)
12469 return tem;
12470 goto associate;
12472 case MAX_EXPR:
12473 if (operand_equal_p (arg0, arg1, 0))
12474 return omit_one_operand_loc (loc, type, arg0, arg1);
12475 if (INTEGRAL_TYPE_P (type)
12476 && TYPE_MAX_VALUE (type)
12477 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12478 return omit_one_operand_loc (loc, type, arg1, arg0);
12479 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12480 if (tem)
12481 return tem;
12482 goto associate;
12484 case TRUTH_ANDIF_EXPR:
12485 /* Note that the operands of this must be ints
12486 and their values must be 0 or 1.
12487 ("true" is a fixed value perhaps depending on the language.) */
12488 /* If first arg is constant zero, return it. */
12489 if (integer_zerop (arg0))
12490 return fold_convert_loc (loc, type, arg0);
12491 case TRUTH_AND_EXPR:
12492 /* If either arg is constant true, drop it. */
12493 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12494 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12495 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12496 /* Preserve sequence points. */
12497 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12498 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12499 /* If second arg is constant zero, result is zero, but first arg
12500 must be evaluated. */
12501 if (integer_zerop (arg1))
12502 return omit_one_operand_loc (loc, type, arg1, arg0);
12503 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12504 case will be handled here. */
12505 if (integer_zerop (arg0))
12506 return omit_one_operand_loc (loc, type, arg0, arg1);
12508 /* !X && X is always false. */
12509 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12510 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12511 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12512 /* X && !X is always false. */
12513 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12514 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12515 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12517 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12518 means A >= Y && A != MAX, but in this case we know that
12519 A < X <= MAX. */
12521 if (!TREE_SIDE_EFFECTS (arg0)
12522 && !TREE_SIDE_EFFECTS (arg1))
12524 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12525 if (tem && !operand_equal_p (tem, arg0, 0))
12526 return fold_build2_loc (loc, code, type, tem, arg1);
12528 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12529 if (tem && !operand_equal_p (tem, arg1, 0))
12530 return fold_build2_loc (loc, code, type, arg0, tem);
12533 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12534 != NULL_TREE)
12535 return tem;
12537 return NULL_TREE;
12539 case TRUTH_ORIF_EXPR:
12540 /* Note that the operands of this must be ints
12541 and their values must be 0 or true.
12542 ("true" is a fixed value perhaps depending on the language.) */
12543 /* If first arg is constant true, return it. */
12544 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12545 return fold_convert_loc (loc, type, arg0);
12546 case TRUTH_OR_EXPR:
12547 /* If either arg is constant zero, drop it. */
12548 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12549 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12550 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12551 /* Preserve sequence points. */
12552 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12553 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12554 /* If second arg is constant true, result is true, but we must
12555 evaluate first arg. */
12556 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12557 return omit_one_operand_loc (loc, type, arg1, arg0);
12558 /* Likewise for first arg, but note this only occurs here for
12559 TRUTH_OR_EXPR. */
12560 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12561 return omit_one_operand_loc (loc, type, arg0, arg1);
12563 /* !X || X is always true. */
12564 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12565 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12566 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12567 /* X || !X is always true. */
12568 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12569 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12570 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12572 /* (X && !Y) || (!X && Y) is X ^ Y */
12573 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12574 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12576 tree a0, a1, l0, l1, n0, n1;
12578 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12579 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12581 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12582 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12584 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12585 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12587 if ((operand_equal_p (n0, a0, 0)
12588 && operand_equal_p (n1, a1, 0))
12589 || (operand_equal_p (n0, a1, 0)
12590 && operand_equal_p (n1, a0, 0)))
12591 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
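/* Example: (a && b) || (!a && !b) matches with Y == !b and folds
   to a ^ !b, which is true exactly when a and b have the same
   truth value.  */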
12594 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12595 != NULL_TREE)
12596 return tem;
12598 return NULL_TREE;
12600 case TRUTH_XOR_EXPR:
12601 /* If the second arg is constant zero, drop it. */
12602 if (integer_zerop (arg1))
12603 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12604 /* If the second arg is constant true, this is a logical inversion. */
12605 if (integer_onep (arg1))
12607 /* Only call invert_truthvalue if operand is a truth value. */
12608 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12609 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12610 else
12611 tem = invert_truthvalue_loc (loc, arg0);
12612 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12614 /* Identical arguments cancel to zero. */
12615 if (operand_equal_p (arg0, arg1, 0))
12616 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12618 /* !X ^ X is always true. */
12619 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12620 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12621 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12623 /* X ^ !X is always true. */
12624 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12625 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12626 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12628 return NULL_TREE;
12630 case EQ_EXPR:
12631 case NE_EXPR:
12632 STRIP_NOPS (arg0);
12633 STRIP_NOPS (arg1);
12635 tem = fold_comparison (loc, code, type, op0, op1);
12636 if (tem != NULL_TREE)
12637 return tem;
12639 /* bool_var != 0 becomes bool_var. */
12640 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12641 && code == NE_EXPR)
12642 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12644 /* bool_var == 1 becomes bool_var. */
12645 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12646 && code == EQ_EXPR)
12647 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12649 /* bool_var != 1 becomes !bool_var. */
12650 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12651 && code == NE_EXPR)
12652 return fold_convert_loc (loc, type,
12653 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12654 TREE_TYPE (arg0), arg0));
12656 /* bool_var == 0 becomes !bool_var. */
12657 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12658 && code == EQ_EXPR)
12659 return fold_convert_loc (loc, type,
12660 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12661 TREE_TYPE (arg0), arg0));
12663 /* !exp != 0 becomes !exp. */
12664 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12665 && code == NE_EXPR)
12666 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12668 /* If this is an equality comparison of the address of two non-weak,
12669 unaliased symbols neither of which are extern (since we do not
12670 have access to attributes for externs), then we know the result. */
12671 if (TREE_CODE (arg0) == ADDR_EXPR
12672 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12673 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12674 && ! lookup_attribute ("alias",
12675 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12676 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12677 && TREE_CODE (arg1) == ADDR_EXPR
12678 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12679 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12680 && ! lookup_attribute ("alias",
12681 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12682 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12684 /* We know that we're looking at the address of two
12685 non-weak, unaliased, static _DECL nodes.
12687 It is both wasteful and incorrect to call operand_equal_p
12688 to compare the two ADDR_EXPR nodes. It is wasteful in that
12689 all we need to do is test pointer equality for the arguments
12690 to the two ADDR_EXPR nodes. It is incorrect to use
12691 operand_equal_p as that function is NOT equivalent to a
12692 C equality test. It can in fact return false for two
12693 objects which would test as equal using the C equality
12694 operator. */
12695 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12696 return constant_boolean_node (equal
12697 ? code == EQ_EXPR : code != EQ_EXPR,
12698 type);
12701 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12702 a MINUS_EXPR of a constant, we can convert it into a comparison with
12703 a revised constant as long as no overflow occurs. */
12704 if (TREE_CODE (arg1) == INTEGER_CST
12705 && (TREE_CODE (arg0) == PLUS_EXPR
12706 || TREE_CODE (arg0) == MINUS_EXPR)
12707 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12708 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12709 ? MINUS_EXPR : PLUS_EXPR,
12710 fold_convert_loc (loc, TREE_TYPE (arg0),
12711 arg1),
12712 TREE_OPERAND (arg0, 1)))
12713 && !TREE_OVERFLOW (tem))
12714 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12716 /* Similarly for a NEGATE_EXPR. */
12717 if (TREE_CODE (arg0) == NEGATE_EXPR
12718 && TREE_CODE (arg1) == INTEGER_CST
12719 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12720 arg1)))
12721 && TREE_CODE (tem) == INTEGER_CST
12722 && !TREE_OVERFLOW (tem))
12723 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12725 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12726 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12727 && TREE_CODE (arg1) == INTEGER_CST
12728 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12729 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12730 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12731 fold_convert_loc (loc,
12732 TREE_TYPE (arg0),
12733 arg1),
12734 TREE_OPERAND (arg0, 1)));
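/* Worked example: (x ^ 4) == 6 folds to x == 2, because
   4 ^ 6 == 2.  */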
12736 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12737 if ((TREE_CODE (arg0) == PLUS_EXPR
12738 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12739 || TREE_CODE (arg0) == MINUS_EXPR)
12740 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12741 0)),
12742 arg1, 0)
12743 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12744 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12746 tree val = TREE_OPERAND (arg0, 1);
12747 return omit_two_operands_loc (loc, type,
12748 fold_build2_loc (loc, code, type,
12749 val,
12750 build_int_cst (TREE_TYPE (val),
12751 0)),
12752 TREE_OPERAND (arg0, 0), arg1);
12755 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12756 if (TREE_CODE (arg0) == MINUS_EXPR
12757 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12758 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12759 1)),
12760 arg1, 0)
12761 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12763 return omit_two_operands_loc (loc, type,
12764 code == NE_EXPR
12765 ? boolean_true_node : boolean_false_node,
12766 TREE_OPERAND (arg0, 1), arg1);
12769 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12770 for !=. Don't do this for ordered comparisons due to overflow. */
12771 if (TREE_CODE (arg0) == MINUS_EXPR
12772 && integer_zerop (arg1))
12773 return fold_build2_loc (loc, code, type,
12774 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12776 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12777 if (TREE_CODE (arg0) == ABS_EXPR
12778 && (integer_zerop (arg1) || real_zerop (arg1)))
12779 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12781 /* If this is an EQ or NE comparison with zero and ARG0 is
12782 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12783 two operations, but the latter can be done in one less insn
12784 on machines that have only two-operand insns or on which a
12785 constant cannot be the first operand. */
12786 if (TREE_CODE (arg0) == BIT_AND_EXPR
12787 && integer_zerop (arg1))
12789 tree arg00 = TREE_OPERAND (arg0, 0);
12790 tree arg01 = TREE_OPERAND (arg0, 1);
12791 if (TREE_CODE (arg00) == LSHIFT_EXPR
12792 && integer_onep (TREE_OPERAND (arg00, 0)))
12794 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12795 arg01, TREE_OPERAND (arg00, 1));
12796 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12797 build_int_cst (TREE_TYPE (arg0), 1));
12798 return fold_build2_loc (loc, code, type,
12799 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12800 arg1);
12802 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12803 && integer_onep (TREE_OPERAND (arg01, 0)))
12805 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12806 arg00, TREE_OPERAND (arg01, 1));
12807 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12808 build_int_cst (TREE_TYPE (arg0), 1));
12809 return fold_build2_loc (loc, code, type,
12810 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12811 arg1);
12815 /* If this is an NE or EQ comparison of zero against the result of a
12816 signed MOD operation whose second operand is a power of 2, make
12817 the MOD operation unsigned since it is simpler and equivalent. */
12818 if (integer_zerop (arg1)
12819 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12820 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12821 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12822 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12823 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12824 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12826 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12827 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12828 fold_convert_loc (loc, newtype,
12829 TREE_OPERAND (arg0, 0)),
12830 fold_convert_loc (loc, newtype,
12831 TREE_OPERAND (arg0, 1)));
12833 return fold_build2_loc (loc, code, type, newmod,
12834 fold_convert_loc (loc, newtype, arg1));
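/* Example: for signed x, x % 4 == 0 becomes (unsigned) x % 4 == 0,
   which the power-of-two MOD case can then reduce to
   ((unsigned) x & 3) == 0.  */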
12837 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12838 C1 is a valid shift constant, and C2 is a power of two, i.e.
12839 a single bit. */
12840 if (TREE_CODE (arg0) == BIT_AND_EXPR
12841 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12842 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12843 == INTEGER_CST
12844 && integer_pow2p (TREE_OPERAND (arg0, 1))
12845 && integer_zerop (arg1))
12847 tree itype = TREE_TYPE (arg0);
12848 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12849 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12851 /* Check for a valid shift count. */
12852 if (TREE_INT_CST_HIGH (arg001) == 0
12853 && TREE_INT_CST_LOW (arg001) < prec)
12855 tree arg01 = TREE_OPERAND (arg0, 1);
12856 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12857 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12858 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12859 can be rewritten as (X & (C2 << C1)) != 0. */
12860 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12862 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12863 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12864 return fold_build2_loc (loc, code, type, tem,
12865 fold_convert_loc (loc, itype, arg1));
12867 /* Otherwise, for signed (arithmetic) shifts,
12868 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12869 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12870 else if (!TYPE_UNSIGNED (itype))
12871 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12872 arg000, build_int_cst (itype, 0));
12873 /* Otherwise, for unsigned (logical) shifts,
12874 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12875 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12876 else
12877 return omit_one_operand_loc (loc, type,
12878 code == EQ_EXPR ? integer_one_node
12879 : integer_zero_node,
12880 arg000);
12884 /* If we have (A & C) == C where C is a power of 2, convert this into
12885 (A & C) != 0. Similarly for NE_EXPR. */
12886 if (TREE_CODE (arg0) == BIT_AND_EXPR
12887 && integer_pow2p (TREE_OPERAND (arg0, 1))
12888 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12889 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12890 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12891 integer_zero_node));
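/* Example: (x & 8) == 8 folds to (x & 8) != 0, a plain single-bit
   test.  */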
12893 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12894 bit, then fold the expression into A < 0 or A >= 0. */
12895 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12896 if (tem)
12897 return tem;
12899 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12900 Similarly for NE_EXPR. */
12901 if (TREE_CODE (arg0) == BIT_AND_EXPR
12902 && TREE_CODE (arg1) == INTEGER_CST
12903 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12905 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12906 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12907 TREE_OPERAND (arg0, 1));
12908 tree dandnotc
12909 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12910 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12911 notc);
12912 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12913 if (integer_nonzerop (dandnotc))
12914 return omit_one_operand_loc (loc, type, rslt, arg0);
12917 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12918 Similarly for NE_EXPR. */
12919 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12920 && TREE_CODE (arg1) == INTEGER_CST
12921 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12923 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12924 tree candnotd
12925 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12926 TREE_OPERAND (arg0, 1),
12927 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12928 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12929 if (integer_nonzerop (candnotd))
12930 return omit_one_operand_loc (loc, type, rslt, arg0);
12933 /* If this is a comparison of a field, we may be able to simplify it. */
12934 if ((TREE_CODE (arg0) == COMPONENT_REF
12935 || TREE_CODE (arg0) == BIT_FIELD_REF)
12936 /* Handle the constant case even without -O
12937 to make sure the warnings are given. */
12938 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12940 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12941 if (t1)
12942 return t1;
12945 /* Optimize comparisons of strlen vs zero to a compare of the
12946 first character of the string vs zero. To wit,
12947 strlen(ptr) == 0 => *ptr == 0
12948 strlen(ptr) != 0 => *ptr != 0
12949 Other cases should reduce to one of these two (or a constant)
12950 due to the return value of strlen being unsigned. */
12951 if (TREE_CODE (arg0) == CALL_EXPR
12952 && integer_zerop (arg1))
12954 tree fndecl = get_callee_fndecl (arg0);
12956 if (fndecl
12957 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12958 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12959 && call_expr_nargs (arg0) == 1
12960 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12962 tree iref = build_fold_indirect_ref_loc (loc,
12963 CALL_EXPR_ARG (arg0, 0));
12964 return fold_build2_loc (loc, code, type, iref,
12965 build_int_cst (TREE_TYPE (iref), 0));
12969 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12970 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12971 if (TREE_CODE (arg0) == RSHIFT_EXPR
12972 && integer_zerop (arg1)
12973 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12975 tree arg00 = TREE_OPERAND (arg0, 0);
12976 tree arg01 = TREE_OPERAND (arg0, 1);
12977 tree itype = TREE_TYPE (arg00);
12978 if (TREE_INT_CST_HIGH (arg01) == 0
12979 && TREE_INT_CST_LOW (arg01)
12980 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12982 if (TYPE_UNSIGNED (itype))
12984 itype = signed_type_for (itype);
12985 arg00 = fold_convert_loc (loc, itype, arg00);
12987 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12988 type, arg00, build_zero_cst (itype));
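/* Example: for 32-bit int x, (x >> 31) != 0 folds to x < 0; for
   unsigned x the operand is first converted to the corresponding
   signed type, giving (int) x < 0.  */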
12992 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12993 if (integer_zerop (arg1)
12994 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12995 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12996 TREE_OPERAND (arg0, 1));
12998 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12999 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13000 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13001 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13002 build_zero_cst (TREE_TYPE (arg0)));
13003 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13004 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13005 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13006 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13007 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13008 build_zero_cst (TREE_TYPE (arg0)));
13010 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13011 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13012 && TREE_CODE (arg1) == INTEGER_CST
13013 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13014 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13015 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13016 TREE_OPERAND (arg0, 1), arg1));
13018 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13019 (X & C) == 0 when C is a single bit. */
13020 if (TREE_CODE (arg0) == BIT_AND_EXPR
13021 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13022 && integer_zerop (arg1)
13023 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13025 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13026 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13027 TREE_OPERAND (arg0, 1));
13028 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13029 type, tem,
13030 fold_convert_loc (loc, TREE_TYPE (arg0),
13031 arg1));
13034 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13035 constant C is a power of two, i.e. a single bit. */
13036 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13037 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13038 && integer_zerop (arg1)
13039 && integer_pow2p (TREE_OPERAND (arg0, 1))
13040 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13041 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13043 tree arg00 = TREE_OPERAND (arg0, 0);
13044 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13045 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13048 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13049 when C is a power of two, i.e. a single bit. */
13050 if (TREE_CODE (arg0) == BIT_AND_EXPR
13051 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13052 && integer_zerop (arg1)
13053 && integer_pow2p (TREE_OPERAND (arg0, 1))
13054 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13055 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13057 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13058 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13059 arg000, TREE_OPERAND (arg0, 1));
13060 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13061 tem, build_int_cst (TREE_TYPE (tem), 0));
13064 if (integer_zerop (arg1)
13065 && tree_expr_nonzero_p (arg0))
13067 tree res = constant_boolean_node (code == NE_EXPR, type);
13068 return omit_one_operand_loc (loc, type, res, arg0);
13071 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13072 if (TREE_CODE (arg0) == NEGATE_EXPR
13073 && TREE_CODE (arg1) == NEGATE_EXPR)
13074 return fold_build2_loc (loc, code, type,
13075 TREE_OPERAND (arg0, 0),
13076 fold_convert_loc (loc, TREE_TYPE (arg0),
13077 TREE_OPERAND (arg1, 0)));
13079 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13080 if (TREE_CODE (arg0) == BIT_AND_EXPR
13081 && TREE_CODE (arg1) == BIT_AND_EXPR)
13083 tree arg00 = TREE_OPERAND (arg0, 0);
13084 tree arg01 = TREE_OPERAND (arg0, 1);
13085 tree arg10 = TREE_OPERAND (arg1, 0);
13086 tree arg11 = TREE_OPERAND (arg1, 1);
13087 tree itype = TREE_TYPE (arg0);
13089 if (operand_equal_p (arg01, arg11, 0))
13090 return fold_build2_loc (loc, code, type,
13091 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13092 fold_build2_loc (loc,
13093 BIT_XOR_EXPR, itype,
13094 arg00, arg10),
13095 arg01),
13096 build_zero_cst (itype));
13098 if (operand_equal_p (arg01, arg10, 0))
13099 return fold_build2_loc (loc, code, type,
13100 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13101 fold_build2_loc (loc,
13102 BIT_XOR_EXPR, itype,
13103 arg00, arg11),
13104 arg01),
13105 build_zero_cst (itype));
13107 if (operand_equal_p (arg00, arg11, 0))
13108 return fold_build2_loc (loc, code, type,
13109 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13110 fold_build2_loc (loc,
13111 BIT_XOR_EXPR, itype,
13112 arg01, arg10),
13113 arg00),
13114 build_zero_cst (itype));
13116 if (operand_equal_p (arg00, arg10, 0))
13117 return fold_build2_loc (loc, code, type,
13118 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13119 fold_build2_loc (loc,
13120 BIT_XOR_EXPR, itype,
13121 arg01, arg11),
13122 arg00),
13123 build_zero_cst (itype));
13126 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13127 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13129 tree arg00 = TREE_OPERAND (arg0, 0);
13130 tree arg01 = TREE_OPERAND (arg0, 1);
13131 tree arg10 = TREE_OPERAND (arg1, 0);
13132 tree arg11 = TREE_OPERAND (arg1, 1);
13133 tree itype = TREE_TYPE (arg0);
13135 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13136 operand_equal_p guarantees no side-effects so we don't need
13137 to use omit_one_operand on Z. */
13138 if (operand_equal_p (arg01, arg11, 0))
13139 return fold_build2_loc (loc, code, type, arg00,
13140 fold_convert_loc (loc, TREE_TYPE (arg00),
13141 arg10));
13142 if (operand_equal_p (arg01, arg10, 0))
13143 return fold_build2_loc (loc, code, type, arg00,
13144 fold_convert_loc (loc, TREE_TYPE (arg00),
13145 arg11));
13146 if (operand_equal_p (arg00, arg11, 0))
13147 return fold_build2_loc (loc, code, type, arg01,
13148 fold_convert_loc (loc, TREE_TYPE (arg01),
13149 arg10));
13150 if (operand_equal_p (arg00, arg10, 0))
13151 return fold_build2_loc (loc, code, type, arg01,
13152 fold_convert_loc (loc, TREE_TYPE (arg01),
13153 arg11));
13155 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13156 if (TREE_CODE (arg01) == INTEGER_CST
13157 && TREE_CODE (arg11) == INTEGER_CST)
13159 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13160 fold_convert_loc (loc, itype, arg11));
13161 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13162 return fold_build2_loc (loc, code, type, tem,
13163 fold_convert_loc (loc, itype, arg10));
13167 /* Attempt to simplify equality/inequality comparisons of complex
13168 values. Only lower the comparison if the result is known or
13169 can be simplified to a single scalar comparison. */
13170 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13171 || TREE_CODE (arg0) == COMPLEX_CST)
13172 && (TREE_CODE (arg1) == COMPLEX_EXPR
13173 || TREE_CODE (arg1) == COMPLEX_CST))
13175 tree real0, imag0, real1, imag1;
13176 tree rcond, icond;
13178 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13180 real0 = TREE_OPERAND (arg0, 0);
13181 imag0 = TREE_OPERAND (arg0, 1);
13183 else
13185 real0 = TREE_REALPART (arg0);
13186 imag0 = TREE_IMAGPART (arg0);
13189 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13191 real1 = TREE_OPERAND (arg1, 0);
13192 imag1 = TREE_OPERAND (arg1, 1);
13194 else
13196 real1 = TREE_REALPART (arg1);
13197 imag1 = TREE_IMAGPART (arg1);
13200 rcond = fold_binary_loc (loc, code, type, real0, real1);
13201 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13203 if (integer_zerop (rcond))
13205 if (code == EQ_EXPR)
13206 return omit_two_operands_loc (loc, type, boolean_false_node,
13207 imag0, imag1);
13208 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13210 else
13212 if (code == NE_EXPR)
13213 return omit_two_operands_loc (loc, type, boolean_true_node,
13214 imag0, imag1);
13215 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13219 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13220 if (icond && TREE_CODE (icond) == INTEGER_CST)
13222 if (integer_zerop (icond))
13224 if (code == EQ_EXPR)
13225 return omit_two_operands_loc (loc, type, boolean_false_node,
13226 real0, real1);
13227 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13229 else
13231 if (code == NE_EXPR)
13232 return omit_two_operands_loc (loc, type, boolean_true_node,
13233 real0, real1);
13234 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13239 return NULL_TREE;
13241 case LT_EXPR:
13242 case GT_EXPR:
13243 case LE_EXPR:
13244 case GE_EXPR:
13245 tem = fold_comparison (loc, code, type, op0, op1);
13246 if (tem != NULL_TREE)
13247 return tem;
13249 /* Transform comparisons of the form X +- C CMP X. */
13250 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13251 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13252 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13253 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13254 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13255 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13257 tree arg01 = TREE_OPERAND (arg0, 1);
13258 enum tree_code code0 = TREE_CODE (arg0);
13259 int is_positive;
13261 if (TREE_CODE (arg01) == REAL_CST)
13262 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13263 else
13264 is_positive = tree_int_cst_sgn (arg01);
13266 /* (X - c) > X becomes false. */
13267 if (code == GT_EXPR
13268 && ((code0 == MINUS_EXPR && is_positive >= 0)
13269 || (code0 == PLUS_EXPR && is_positive <= 0)))
13271 if (TREE_CODE (arg01) == INTEGER_CST
13272 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13273 fold_overflow_warning (("assuming signed overflow does not "
13274 "occur when assuming that (X - c) > X "
13275 "is always false"),
13276 WARN_STRICT_OVERFLOW_ALL);
13277 return constant_boolean_node (0, type);
13280 /* Likewise (X + c) < X becomes false. */
13281 if (code == LT_EXPR
13282 && ((code0 == PLUS_EXPR && is_positive >= 0)
13283 || (code0 == MINUS_EXPR && is_positive <= 0)))
13285 if (TREE_CODE (arg01) == INTEGER_CST
13286 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13287 fold_overflow_warning (("assuming signed overflow does not "
13288 "occur when assuming that "
13289 "(X + c) < X is always false"),
13290 WARN_STRICT_OVERFLOW_ALL);
13291 return constant_boolean_node (0, type);
13294 /* Convert (X - c) <= X to true. */
13295 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13296 && code == LE_EXPR
13297 && ((code0 == MINUS_EXPR && is_positive >= 0)
13298 || (code0 == PLUS_EXPR && is_positive <= 0)))
13300 if (TREE_CODE (arg01) == INTEGER_CST
13301 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13302 fold_overflow_warning (("assuming signed overflow does not "
13303 "occur when assuming that "
13304 "(X - c) <= X is always true"),
13305 WARN_STRICT_OVERFLOW_ALL);
13306 return constant_boolean_node (1, type);
13309 /* Convert (X + c) >= X to true. */
13310 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13311 && code == GE_EXPR
13312 && ((code0 == PLUS_EXPR && is_positive >= 0)
13313 || (code0 == MINUS_EXPR && is_positive <= 0)))
13315 if (TREE_CODE (arg01) == INTEGER_CST
13316 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13317 fold_overflow_warning (("assuming signed overflow does not "
13318 "occur when assuming that "
13319 "(X + c) >= X is always true"),
13320 WARN_STRICT_OVERFLOW_ALL);
13321 return constant_boolean_node (1, type);
13324 if (TREE_CODE (arg01) == INTEGER_CST)
13326 /* Convert X + c > X and X - c < X to true for integers. */
13327 if (code == GT_EXPR
13328 && ((code0 == PLUS_EXPR && is_positive > 0)
13329 || (code0 == MINUS_EXPR && is_positive < 0)))
13331 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13332 fold_overflow_warning (("assuming signed overflow does "
13333 "not occur when assuming that "
13334 "(X + c) > X is always true"),
13335 WARN_STRICT_OVERFLOW_ALL);
13336 return constant_boolean_node (1, type);
13339 if (code == LT_EXPR
13340 && ((code0 == MINUS_EXPR && is_positive > 0)
13341 || (code0 == PLUS_EXPR && is_positive < 0)))
13343 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13344 fold_overflow_warning (("assuming signed overflow does "
13345 "not occur when assuming that "
13346 "(X - c) < X is always true"),
13347 WARN_STRICT_OVERFLOW_ALL);
13348 return constant_boolean_node (1, type);
13351 /* Convert X + c <= X and X - c >= X to false for integers. */
13352 if (code == LE_EXPR
13353 && ((code0 == PLUS_EXPR && is_positive > 0)
13354 || (code0 == MINUS_EXPR && is_positive < 0)))
13356 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13357 fold_overflow_warning (("assuming signed overflow does "
13358 "not occur when assuming that "
13359 "(X + c) <= X is always false"),
13360 WARN_STRICT_OVERFLOW_ALL);
13361 return constant_boolean_node (0, type);
13364 if (code == GE_EXPR
13365 && ((code0 == MINUS_EXPR && is_positive > 0)
13366 || (code0 == PLUS_EXPR && is_positive < 0)))
13368 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13369 fold_overflow_warning (("assuming signed overflow does "
13370 "not occur when assuming that "
13371 "(X - c) >= X is always false"),
13372 WARN_STRICT_OVERFLOW_ALL);
13373 return constant_boolean_node (0, type);
13378 /* Comparisons with the highest or lowest possible integer of
13379 the specified precision will have known values. */
13381 tree arg1_type = TREE_TYPE (arg1);
13382 unsigned int width = TYPE_PRECISION (arg1_type);
13384 if (TREE_CODE (arg1) == INTEGER_CST
13385 && width <= HOST_BITS_PER_DOUBLE_INT
13386 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13388 HOST_WIDE_INT signed_max_hi;
13389 unsigned HOST_WIDE_INT signed_max_lo;
13390 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13392 if (width <= HOST_BITS_PER_WIDE_INT)
13394 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13395 - 1;
13396 signed_max_hi = 0;
13397 max_hi = 0;
13399 if (TYPE_UNSIGNED (arg1_type))
13401 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13402 min_lo = 0;
13403 min_hi = 0;
13405 else
13407 max_lo = signed_max_lo;
13408 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13409 min_hi = -1;
13412 else
13414 width -= HOST_BITS_PER_WIDE_INT;
13415 signed_max_lo = -1;
13416 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13417 - 1;
13418 max_lo = -1;
13419 min_lo = 0;
13421 if (TYPE_UNSIGNED (arg1_type))
13423 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13424 min_hi = 0;
13426 else
13428 max_hi = signed_max_hi;
13429 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13433 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13434 && TREE_INT_CST_LOW (arg1) == max_lo)
13435 switch (code)
13437 case GT_EXPR:
13438 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13440 case GE_EXPR:
13441 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13443 case LE_EXPR:
13444 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13446 case LT_EXPR:
13447 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13449 /* The GE_EXPR and LT_EXPR cases above are not normally
13450 reached because of previous transformations. */
13452 default:
13453 break;
13455 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13456 == max_hi
13457 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13458 switch (code)
13460 case GT_EXPR:
13461 arg1 = const_binop (PLUS_EXPR, arg1,
13462 build_int_cst (TREE_TYPE (arg1), 1));
13463 return fold_build2_loc (loc, EQ_EXPR, type,
13464 fold_convert_loc (loc,
13465 TREE_TYPE (arg1), arg0),
13466 arg1);
13467 case LE_EXPR:
13468 arg1 = const_binop (PLUS_EXPR, arg1,
13469 build_int_cst (TREE_TYPE (arg1), 1));
13470 return fold_build2_loc (loc, NE_EXPR, type,
13471 fold_convert_loc (loc, TREE_TYPE (arg1),
13472 arg0),
13473 arg1);
13474 default:
13475 break;
13477 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13478 == min_hi
13479 && TREE_INT_CST_LOW (arg1) == min_lo)
13480 switch (code)
13482 case LT_EXPR:
13483 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13485 case LE_EXPR:
13486 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13488 case GE_EXPR:
13489 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13491 case GT_EXPR:
13492 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13494 default:
13495 break;
13497 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13498 == min_hi
13499 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13500 switch (code)
13502 case GE_EXPR:
13503 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13504 return fold_build2_loc (loc, NE_EXPR, type,
13505 fold_convert_loc (loc,
13506 TREE_TYPE (arg1), arg0),
13507 arg1);
13508 case LT_EXPR:
13509 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13510 return fold_build2_loc (loc, EQ_EXPR, type,
13511 fold_convert_loc (loc, TREE_TYPE (arg1),
13512 arg0),
13513 arg1);
13514 default:
13515 break;
13518 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13519 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13520 && TYPE_UNSIGNED (arg1_type)
13521 /* We will flip the signedness of the comparison operator
13522 associated with the mode of arg1, so the sign bit is
13523 specified by this mode. Check that arg1 is the signed
13524 max associated with this sign bit. */
13525 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13526 /* signed_type does not work on pointer types. */
13527 && INTEGRAL_TYPE_P (arg1_type))
13529 /* The following case also applies to X < signed_max+1
13530 and X >= signed_max+1 because of previous transformations. */
13531 if (code == LE_EXPR || code == GT_EXPR)
13533 tree st;
13534 st = signed_type_for (TREE_TYPE (arg1));
13535 return fold_build2_loc (loc,
13536 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13537 type, fold_convert_loc (loc, st, arg0),
13538 build_int_cst (st, 0));
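/* Example: for 32-bit unsigned x, x <= 0x7fffffff folds to
   (int) x >= 0, a plain sign test.  */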
13544 /* If we are comparing an ABS_EXPR with a constant, we can
13545 convert all the cases into explicit comparisons, but they may
13546 well not be faster than doing the ABS and one comparison.
13547 But ABS (X) <= C is a range comparison, which becomes a subtraction
13548 and a comparison, and is probably faster. */
13549 if (code == LE_EXPR
13550 && TREE_CODE (arg1) == INTEGER_CST
13551 && TREE_CODE (arg0) == ABS_EXPR
13552 && ! TREE_SIDE_EFFECTS (arg0)
13553 && (0 != (tem = negate_expr (arg1)))
13554 && TREE_CODE (tem) == INTEGER_CST
13555 && !TREE_OVERFLOW (tem))
13556 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13557 build2 (GE_EXPR, type,
13558 TREE_OPERAND (arg0, 0), tem),
13559 build2 (LE_EXPR, type,
13560 TREE_OPERAND (arg0, 0), arg1));
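/* Example: abs (x) <= 5 folds to x >= -5 && x <= 5, the range
   form that becomes a subtraction and a single comparison.  */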
13562 /* Convert ABS_EXPR<x> >= 0 to true. */
13563 strict_overflow_p = false;
13564 if (code == GE_EXPR
13565 && (integer_zerop (arg1)
13566 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13567 && real_zerop (arg1)))
13568 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13570 if (strict_overflow_p)
13571 fold_overflow_warning (("assuming signed overflow does not occur "
13572 "when simplifying comparison of "
13573 "absolute value and zero"),
13574 WARN_STRICT_OVERFLOW_CONDITIONAL);
13575 return omit_one_operand_loc (loc, type,
13576 constant_boolean_node (true, type),
13577 arg0);
13580 /* Convert ABS_EXPR<x> < 0 to false. */
13581 strict_overflow_p = false;
13582 if (code == LT_EXPR
13583 && (integer_zerop (arg1) || real_zerop (arg1))
13584 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13586 if (strict_overflow_p)
13587 fold_overflow_warning (("assuming signed overflow does not occur "
13588 "when simplifying comparison of "
13589 "absolute value and zero"),
13590 WARN_STRICT_OVERFLOW_CONDITIONAL);
13591 return omit_one_operand_loc (loc, type,
13592 constant_boolean_node (false, type),
13593 arg0);
13596 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13597 and similarly for >= into !=. */
13598 if ((code == LT_EXPR || code == GE_EXPR)
13599 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13600 && TREE_CODE (arg1) == LSHIFT_EXPR
13601 && integer_onep (TREE_OPERAND (arg1, 0)))
13602 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13603 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13604 TREE_OPERAND (arg1, 1)),
13605 build_zero_cst (TREE_TYPE (arg0)));
13607 if ((code == LT_EXPR || code == GE_EXPR)
13608 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13609 && CONVERT_EXPR_P (arg1)
13610 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13611 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13613 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13614 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13615 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13616 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13617 build_zero_cst (TREE_TYPE (arg0)));
13620 return NULL_TREE;
13622 case UNORDERED_EXPR:
13623 case ORDERED_EXPR:
13624 case UNLT_EXPR:
13625 case UNLE_EXPR:
13626 case UNGT_EXPR:
13627 case UNGE_EXPR:
13628 case UNEQ_EXPR:
13629 case LTGT_EXPR:
13630 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13632 t1 = fold_relational_const (code, type, arg0, arg1);
13633 if (t1 != NULL_TREE)
13634 return t1;
13637 /* If the first operand is NaN, the result is constant. */
13638 if (TREE_CODE (arg0) == REAL_CST
13639 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13640 && (code != LTGT_EXPR || ! flag_trapping_math))
13642 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13643 ? integer_zero_node
13644 : integer_one_node;
13645 return omit_one_operand_loc (loc, type, t1, arg1);
13648 /* If the second operand is NaN, the result is constant. */
13649 if (TREE_CODE (arg1) == REAL_CST
13650 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13651 && (code != LTGT_EXPR || ! flag_trapping_math))
13653 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13654 ? integer_zero_node
13655 : integer_one_node;
13656 return omit_one_operand_loc (loc, type, t1, arg0);
13659 /* Simplify unordered comparison of something with itself. */
13660 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13661 && operand_equal_p (arg0, arg1, 0))
13662 return constant_boolean_node (1, type);
13664 if (code == LTGT_EXPR
13665 && !flag_trapping_math
13666 && operand_equal_p (arg0, arg1, 0))
13667 return constant_boolean_node (0, type);
13669 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13671 tree targ0 = strip_float_extensions (arg0);
13672 tree targ1 = strip_float_extensions (arg1);
13673 tree newtype = TREE_TYPE (targ0);
13675 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13676 newtype = TREE_TYPE (targ1);
13678 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13679 return fold_build2_loc (loc, code, type,
13680 fold_convert_loc (loc, newtype, targ0),
13681 fold_convert_loc (loc, newtype, targ1));
13684 return NULL_TREE;
13686 case COMPOUND_EXPR:
13687 /* When pedantic, a compound expression can be neither an lvalue
13688 nor an integer constant expression. */
13689 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13690 return NULL_TREE;
13691 /* Don't let (0, 0) be a null pointer constant. */
13692 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13693 : fold_convert_loc (loc, type, arg1);
13694 return pedantic_non_lvalue_loc (loc, tem);
13696 case COMPLEX_EXPR:
13697 if ((TREE_CODE (arg0) == REAL_CST
13698 && TREE_CODE (arg1) == REAL_CST)
13699 || (TREE_CODE (arg0) == INTEGER_CST
13700 && TREE_CODE (arg1) == INTEGER_CST))
13701 return build_complex (type, arg0, arg1);
13702 if (TREE_CODE (arg0) == REALPART_EXPR
13703 && TREE_CODE (arg1) == IMAGPART_EXPR
13704 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13705 && operand_equal_p (TREE_OPERAND (arg0, 0),
13706 TREE_OPERAND (arg1, 0), 0))
13707 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13708 TREE_OPERAND (arg1, 0));
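/* Example: COMPLEX_EXPR <REALPART_EXPR <z>, IMAGPART_EXPR <z>>
   folds back to z itself when the types match.  */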
13709 return NULL_TREE;
13711 case ASSERT_EXPR:
13712 /* An ASSERT_EXPR should never be passed to fold_binary. */
13713 gcc_unreachable ();
13715 case VEC_PACK_TRUNC_EXPR:
13716 case VEC_PACK_FIX_TRUNC_EXPR:
13718 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13719 tree *elts;
13721 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13722 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13723 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13724 return NULL_TREE;
13726 elts = XALLOCAVEC (tree, nelts);
13727 if (!vec_cst_ctor_to_array (arg0, elts)
13728 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13729 return NULL_TREE;
13731 for (i = 0; i < nelts; i++)
13733 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13734 ? NOP_EXPR : FIX_TRUNC_EXPR,
13735 TREE_TYPE (type), elts[i]);
13736 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13737 return NULL_TREE;
13740 return build_vector (type, elts);
13743 case VEC_WIDEN_MULT_LO_EXPR:
13744 case VEC_WIDEN_MULT_HI_EXPR:
13745 case VEC_WIDEN_MULT_EVEN_EXPR:
13746 case VEC_WIDEN_MULT_ODD_EXPR:
13748 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13749 unsigned int out, ofs, scale;
13750 tree *elts;
13752 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13753 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13754 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13755 return NULL_TREE;
13757 elts = XALLOCAVEC (tree, nelts * 4);
13758 if (!vec_cst_ctor_to_array (arg0, elts)
13759 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13760 return NULL_TREE;
13762 if (code == VEC_WIDEN_MULT_LO_EXPR)
13763 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13764 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13765 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13766 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13767 scale = 1, ofs = 0;
13768 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13769 scale = 1, ofs = 1;
13771 for (out = 0; out < nelts; out++)
13773 unsigned int in1 = (out << scale) + ofs;
13774 unsigned int in2 = in1 + nelts * 2;
13775 tree t1, t2;
13777 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13778 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13780 if (t1 == NULL_TREE || t2 == NULL_TREE)
13781 return NULL_TREE;
13782 elts[out] = const_binop (MULT_EXPR, t1, t2);
13783 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13784 return NULL_TREE;
13787 return build_vector (type, elts);
13790 default:
13791 return NULL_TREE;
13792 } /* switch (code) */
13795 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13796 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13797 of GOTO_EXPR. */
13799 static tree
13800 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13802 switch (TREE_CODE (*tp))
13804 case LABEL_EXPR:
13805 return *tp;
13807 case GOTO_EXPR:
13808 *walk_subtrees = 0;
13810 /* ... fall through ... */
13812 default:
13813 return NULL_TREE;
13817 /* Return whether the sub-tree ST contains a label which is accessible from
13818 outside the sub-tree. */
13820 static bool
13821 contains_label_p (tree st)
13823 return
13824 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13827 /* Fold a ternary expression of code CODE and type TYPE with operands
13828 OP0, OP1, and OP2. Return the folded expression if folding is
13829 successful. Otherwise, return NULL_TREE. */
13831 tree
13832 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13833 tree op0, tree op1, tree op2)
13835 tree tem;
13836 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13837 enum tree_code_class kind = TREE_CODE_CLASS (code);
13839 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13840 && TREE_CODE_LENGTH (code) == 3);
13842 /* Strip any conversions that don't change the mode. This is safe
13843 for every expression, except for a comparison expression because
13844 its signedness is derived from its operands. So, in the latter
13845 case, only strip conversions that don't change the signedness.
13847 Note that this is done as an internal manipulation within the
13848 constant folder, in order to find the simplest representation of
13849 the arguments so that their form can be studied. In any case,
13850 the appropriate type conversions should be put back in the tree
13851 that will get out of the constant folder. */
13852 if (op0)
13854 arg0 = op0;
13855 STRIP_NOPS (arg0);
13858 if (op1)
13860 arg1 = op1;
13861 STRIP_NOPS (arg1);
13864 if (op2)
13866 arg2 = op2;
13867 STRIP_NOPS (arg2);
13870 switch (code)
13872 case COMPONENT_REF:
13873 if (TREE_CODE (arg0) == CONSTRUCTOR
13874 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13876 unsigned HOST_WIDE_INT idx;
13877 tree field, value;
13878 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13879 if (field == arg1)
13880 return value;
13882 return NULL_TREE;
13884 case COND_EXPR:
13885 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13886 so all simple results must be passed through pedantic_non_lvalue. */
13887 if (TREE_CODE (arg0) == INTEGER_CST)
13889 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13890 tem = integer_zerop (arg0) ? op2 : op1;
13891 /* Only optimize constant conditions when the selected branch
13892 has the same type as the COND_EXPR. This avoids optimizing
13893 away "c ? x : throw", where the throw has a void type.
13894 Avoid throwing away an operand that contains a label. */
13895 if ((!TREE_SIDE_EFFECTS (unused_op)
13896 || !contains_label_p (unused_op))
13897 && (! VOID_TYPE_P (TREE_TYPE (tem))
13898 || VOID_TYPE_P (type)))
13899 return pedantic_non_lvalue_loc (loc, tem);
13900 return NULL_TREE;
13902 if (operand_equal_p (arg1, op2, 0))
13903 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13905 /* If we have A op B ? A : C, we may be able to convert this to a
13906 simpler expression, depending on the operation and the values
13907 of B and C. Signed zeros prevent all of these transformations,
13908 for reasons given above each one.
13910 Also try swapping the arguments and inverting the conditional. */
13911 if (COMPARISON_CLASS_P (arg0)
13912 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13913 arg1, TREE_OPERAND (arg0, 1))
13914 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13916 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13917 if (tem)
13918 return tem;
13921 if (COMPARISON_CLASS_P (arg0)
13922 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13923 op2,
13924 TREE_OPERAND (arg0, 1))
13925 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13927 location_t loc0 = expr_location_or (arg0, loc);
13928 tem = fold_truth_not_expr (loc0, arg0);
13929 if (tem && COMPARISON_CLASS_P (tem))
13931 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13932 if (tem)
13933 return tem;
13937 /* If the second operand is simpler than the third, swap them
13938 since that produces better jump optimization results. */
13939 if (truth_value_p (TREE_CODE (arg0))
13940 && tree_swap_operands_p (op1, op2, false))
13942 location_t loc0 = expr_location_or (arg0, loc);
13943 /* See if this can be inverted. If it can't, possibly because
13944 it was a floating-point inequality comparison, don't do
13945 anything. */
13946 tem = fold_truth_not_expr (loc0, arg0);
13947 if (tem)
13948 return fold_build3_loc (loc, code, type, tem, op2, op1);
13951 /* Convert A ? 1 : 0 to simply A. */
13952 if (integer_onep (op1)
13953 && integer_zerop (op2)
13954 /* If we try to convert OP0 to our type, the
13955 call to fold will try to move the conversion inside
13956 a COND, which will recurse. In that case, the COND_EXPR
13957 is probably the best choice, so leave it alone. */
13958 && type == TREE_TYPE (arg0))
13959 return pedantic_non_lvalue_loc (loc, arg0);
13961 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13962 over COND_EXPR in cases such as floating point comparisons. */
13963 if (integer_zerop (op1)
13964 && integer_onep (op2)
13965 && truth_value_p (TREE_CODE (arg0)))
13966 return pedantic_non_lvalue_loc (loc,
13967 fold_convert_loc (loc, type,
13968 invert_truthvalue_loc (loc,
13969 arg0)));
13971 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13972 if (TREE_CODE (arg0) == LT_EXPR
13973 && integer_zerop (TREE_OPERAND (arg0, 1))
13974 && integer_zerop (op2)
13975 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13977 /* sign_bit_p only checks ARG1 bits within A's precision.
13978 If <sign bit of A> has wider type than A, bits outside
13979 of A's precision in <sign bit of A> need to be checked.
13980 If they are all 0, this optimization needs to be done
13981 in unsigned A's type; if they are all 1, in signed A's type;
13982 otherwise it can't be done. */
13983 if (TYPE_PRECISION (TREE_TYPE (tem))
13984 < TYPE_PRECISION (TREE_TYPE (arg1))
13985 && TYPE_PRECISION (TREE_TYPE (tem))
13986 < TYPE_PRECISION (type))
13988 unsigned HOST_WIDE_INT mask_lo;
13989 HOST_WIDE_INT mask_hi;
13990 int inner_width, outer_width;
13991 tree tem_type;
13993 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13994 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13995 if (outer_width > TYPE_PRECISION (type))
13996 outer_width = TYPE_PRECISION (type);
13998 if (outer_width > HOST_BITS_PER_WIDE_INT)
14000 mask_hi = ((unsigned HOST_WIDE_INT) -1
14001 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14002 mask_lo = -1;
14004 else
14006 mask_hi = 0;
14007 mask_lo = ((unsigned HOST_WIDE_INT) -1
14008 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14010 if (inner_width > HOST_BITS_PER_WIDE_INT)
14012 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
14013 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14014 mask_lo = 0;
14016 else
14017 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
14018 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14020 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14021 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14023 tem_type = signed_type_for (TREE_TYPE (tem));
14024 tem = fold_convert_loc (loc, tem_type, tem);
14026 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14027 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14029 tem_type = unsigned_type_for (TREE_TYPE (tem));
14030 tem = fold_convert_loc (loc, tem_type, tem);
14032 else
14033 tem = NULL;
14036 if (tem)
14037 return
14038 fold_convert_loc (loc, type,
14039 fold_build2_loc (loc, BIT_AND_EXPR,
14040 TREE_TYPE (tem), tem,
14041 fold_convert_loc (loc,
14042 TREE_TYPE (tem),
14043 arg1)));
14046 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14047 already handled above. */
14048 if (TREE_CODE (arg0) == BIT_AND_EXPR
14049 && integer_onep (TREE_OPERAND (arg0, 1))
14050 && integer_zerop (op2)
14051 && integer_pow2p (arg1))
14053 tree tem = TREE_OPERAND (arg0, 0);
14054 STRIP_NOPS (tem);
14055 if (TREE_CODE (tem) == RSHIFT_EXPR
14056 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14057 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14058 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14059 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14060 TREE_OPERAND (tem, 0), arg1);
14063 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14064 is probably obsolete because the first operand should be a
14065 truth value (that's why we have the two cases above), but let's
14066 leave it in until we can confirm this for all front-ends. */
14067 if (integer_zerop (op2)
14068 && TREE_CODE (arg0) == NE_EXPR
14069 && integer_zerop (TREE_OPERAND (arg0, 1))
14070 && integer_pow2p (arg1)
14071 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14072 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14073 arg1, OEP_ONLY_CONST))
14074 return pedantic_non_lvalue_loc (loc,
14075 fold_convert_loc (loc, type,
14076 TREE_OPERAND (arg0, 0)));
14078 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14079 if (integer_zerop (op2)
14080 && truth_value_p (TREE_CODE (arg0))
14081 && truth_value_p (TREE_CODE (arg1)))
14082 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14083 fold_convert_loc (loc, type, arg0),
14084 arg1);
14086 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14087 if (integer_onep (op2)
14088 && truth_value_p (TREE_CODE (arg0))
14089 && truth_value_p (TREE_CODE (arg1)))
14091 location_t loc0 = expr_location_or (arg0, loc);
14092 /* Only perform transformation if ARG0 is easily inverted. */
14093 tem = fold_truth_not_expr (loc0, arg0);
14094 if (tem)
14095 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14096 fold_convert_loc (loc, type, tem),
14097 arg1);
14100 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14101 if (integer_zerop (arg1)
14102 && truth_value_p (TREE_CODE (arg0))
14103 && truth_value_p (TREE_CODE (op2)))
14105 location_t loc0 = expr_location_or (arg0, loc);
14106 /* Only perform transformation if ARG0 is easily inverted. */
14107 tem = fold_truth_not_expr (loc0, arg0);
14108 if (tem)
14109 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14110 fold_convert_loc (loc, type, tem),
14111 op2);
14114 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14115 if (integer_onep (arg1)
14116 && truth_value_p (TREE_CODE (arg0))
14117 && truth_value_p (TREE_CODE (op2)))
14118 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14119 fold_convert_loc (loc, type, arg0),
14120 op2);
14122 return NULL_TREE;
14124 case VEC_COND_EXPR:
14125 if (TREE_CODE (arg0) == VECTOR_CST)
14127 if (integer_all_onesp (arg0) && !TREE_SIDE_EFFECTS (op2))
14128 return pedantic_non_lvalue_loc (loc, op1);
14129 if (integer_zerop (arg0) && !TREE_SIDE_EFFECTS (op1))
14130 return pedantic_non_lvalue_loc (loc, op2);
14132 return NULL_TREE;
14134 case CALL_EXPR:
14135 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14136 of fold_ternary on them. */
14137 gcc_unreachable ();
14139 case BIT_FIELD_REF:
14140 if ((TREE_CODE (arg0) == VECTOR_CST
14141 || (TREE_CODE (arg0) == CONSTRUCTOR
14142 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14143 && (type == TREE_TYPE (TREE_TYPE (arg0))
14144 || (TREE_CODE (type) == VECTOR_TYPE
14145 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14147 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14148 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14149 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14150 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14152 if (n != 0
14153 && (idx % width) == 0
14154 && (n % width) == 0
14155 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14157 idx = idx / width;
14158 n = n / width;
14160 if (TREE_CODE (arg0) == VECTOR_CST)
14162 if (n == 1)
14163 return VECTOR_CST_ELT (arg0, idx);
14165 tree *vals = XALLOCAVEC (tree, n);
14166 for (unsigned i = 0; i < n; ++i)
14167 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14168 return build_vector (type, vals);
14171 /* Constructor elements can be subvectors. */
14172 unsigned HOST_WIDE_INT k = 1;
14173 if (CONSTRUCTOR_NELTS (arg0) != 0)
14175 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14176 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14177 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14180 /* We keep an exact subset of the constructor elements. */
14181 if ((idx % k) == 0 && (n % k) == 0)
14183 if (CONSTRUCTOR_NELTS (arg0) == 0)
14184 return build_constructor (type, NULL);
14185 idx /= k;
14186 n /= k;
14187 if (n == 1)
14189 if (idx < CONSTRUCTOR_NELTS (arg0))
14190 return CONSTRUCTOR_ELT (arg0, idx)->value;
14191 return build_zero_cst (type);
14194 vec<constructor_elt, va_gc> *vals;
14195 vec_alloc (vals, n);
14196 for (unsigned i = 0;
14197 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14198 ++i)
14199 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14200 CONSTRUCTOR_ELT
14201 (arg0, idx + i)->value);
14202 return build_constructor (type, vals);
14204 /* The bitfield references a single constructor element. */
14205 else if (idx + n <= (idx / k + 1) * k)
14207 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14208 return build_zero_cst (type);
14209 else if (n == k)
14210 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14211 else
14212 return fold_build3_loc (loc, code, type,
14213 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14214 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14219 /* A BIT_FIELD_REF that references the full argument can be stripped. */
14220 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14221 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14222 && integer_zerop (op2))
14223 return fold_convert_loc (loc, type, arg0);
14225 /* On constants we can use native encode/interpret to constant
14226 fold (nearly) all BIT_FIELD_REFs. */
14227 if (CONSTANT_CLASS_P (arg0)
14228 && can_native_interpret_type_p (type)
14229 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14230 /* This limitation should not be necessary; we just need to
14231 round this up to the mode size. */
14232 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14233 /* Need bit-shifting of the buffer to relax the following. */
14234 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14236 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14237 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14238 unsigned HOST_WIDE_INT clen;
14239 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14240 /* ??? We cannot tell native_encode_expr to start at
14241 an arbitrary byte offset. So limit ourselves to a reasonable
14242 amount of work. */
14243 if (clen <= 4096)
14245 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14246 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14247 if (len > 0
14248 && len * BITS_PER_UNIT >= bitpos + bitsize)
14250 tree v = native_interpret_expr (type,
14251 b + bitpos / BITS_PER_UNIT,
14252 bitsize / BITS_PER_UNIT);
14253 if (v)
14254 return v;
14259 return NULL_TREE;
14261 case FMA_EXPR:
14262 /* For integers we can decompose the FMA if possible. */
14263 if (TREE_CODE (arg0) == INTEGER_CST
14264 && TREE_CODE (arg1) == INTEGER_CST)
14265 return fold_build2_loc (loc, PLUS_EXPR, type,
14266 const_binop (MULT_EXPR, arg0, arg1), arg2);
14267 if (integer_zerop (arg2))
14268 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14270 return fold_fma (loc, type, arg0, arg1, arg2);
14272 case VEC_PERM_EXPR:
14273 if (TREE_CODE (arg2) == VECTOR_CST)
14275 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14276 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14277 tree t;
14278 bool need_mask_canon = false;
14279 bool all_in_vec0 = true;
14280 bool all_in_vec1 = true;
14281 bool maybe_identity = true;
14282 bool single_arg = (op0 == op1);
14283 bool changed = false;
14285 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14286 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14287 for (i = 0; i < nelts; i++)
14289 tree val = VECTOR_CST_ELT (arg2, i);
14290 if (TREE_CODE (val) != INTEGER_CST)
14291 return NULL_TREE;
14293 sel[i] = TREE_INT_CST_LOW (val) & mask;
14294 if (TREE_INT_CST_HIGH (val)
14295 || ((unsigned HOST_WIDE_INT)
14296 TREE_INT_CST_LOW (val) != sel[i]))
14297 need_mask_canon = true;
14299 if (sel[i] < nelts)
14300 all_in_vec1 = false;
14301 else
14302 all_in_vec0 = false;
14304 if ((sel[i] & (nelts-1)) != i)
14305 maybe_identity = false;
14308 if (maybe_identity)
14310 if (all_in_vec0)
14311 return op0;
14312 if (all_in_vec1)
14313 return op1;
14316 if (all_in_vec0)
14317 op1 = op0;
14318 else if (all_in_vec1)
14320 op0 = op1;
14321 for (i = 0; i < nelts; i++)
14322 sel[i] -= nelts;
14323 need_mask_canon = true;
14326 if ((TREE_CODE (op0) == VECTOR_CST
14327 || TREE_CODE (op0) == CONSTRUCTOR)
14328 && (TREE_CODE (op1) == VECTOR_CST
14329 || TREE_CODE (op1) == CONSTRUCTOR))
14331 t = fold_vec_perm (type, op0, op1, sel);
14332 if (t != NULL_TREE)
14333 return t;
14336 if (op0 == op1 && !single_arg)
14337 changed = true;
14339 if (need_mask_canon && arg2 == op2)
14341 tree *tsel = XALLOCAVEC (tree, nelts);
14342 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14343 for (i = 0; i < nelts; i++)
14344 tsel[i] = build_int_cst (eltype, sel[i]);
14345 op2 = build_vector (TREE_TYPE (arg2), tsel);
14346 changed = true;
14349 if (changed)
14350 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14352 return NULL_TREE;
14354 default:
14355 return NULL_TREE;
14356 } /* switch (code) */
14359 /* Perform constant folding and related simplification of EXPR.
14360 The related simplifications include x*1 => x, x*0 => 0, etc.,
14361 and application of the associative law.
14362 NOP_EXPR conversions may be removed freely (as long as we
14363 are careful not to change the type of the overall expression).
14364 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14365 but we can constant-fold them if they have constant operands. */
14367 #ifdef ENABLE_FOLD_CHECKING
14368 # define fold(x) fold_1 (x)
14369 static tree fold_1 (tree);
14370 static
14371 #endif
14372 tree
14373 fold (tree expr)
14375 const tree t = expr;
14376 enum tree_code code = TREE_CODE (t);
14377 enum tree_code_class kind = TREE_CODE_CLASS (code);
14378 tree tem;
14379 location_t loc = EXPR_LOCATION (expr);
14381 /* Return right away if a constant. */
14382 if (kind == tcc_constant)
14383 return t;
14385 /* CALL_EXPR-like objects with variable numbers of operands are
14386 treated specially. */
14387 if (kind == tcc_vl_exp)
14389 if (code == CALL_EXPR)
14391 tem = fold_call_expr (loc, expr, false);
14392 return tem ? tem : expr;
14394 return expr;
14397 if (IS_EXPR_CODE_CLASS (kind))
14399 tree type = TREE_TYPE (t);
14400 tree op0, op1, op2;
14402 switch (TREE_CODE_LENGTH (code))
14404 case 1:
14405 op0 = TREE_OPERAND (t, 0);
14406 tem = fold_unary_loc (loc, code, type, op0);
14407 return tem ? tem : expr;
14408 case 2:
14409 op0 = TREE_OPERAND (t, 0);
14410 op1 = TREE_OPERAND (t, 1);
14411 tem = fold_binary_loc (loc, code, type, op0, op1);
14412 return tem ? tem : expr;
14413 case 3:
14414 op0 = TREE_OPERAND (t, 0);
14415 op1 = TREE_OPERAND (t, 1);
14416 op2 = TREE_OPERAND (t, 2);
14417 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14418 return tem ? tem : expr;
14419 default:
14420 break;
14424 switch (code)
14426 case ARRAY_REF:
14428 tree op0 = TREE_OPERAND (t, 0);
14429 tree op1 = TREE_OPERAND (t, 1);
14431 if (TREE_CODE (op1) == INTEGER_CST
14432 && TREE_CODE (op0) == CONSTRUCTOR
14433 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14435 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14436 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14437 unsigned HOST_WIDE_INT begin = 0;
14439 /* Find a matching index by means of a binary search. */
14440 while (begin != end)
14442 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14443 tree index = (*elts)[middle].index;
14445 if (TREE_CODE (index) == INTEGER_CST
14446 && tree_int_cst_lt (index, op1))
14447 begin = middle + 1;
14448 else if (TREE_CODE (index) == INTEGER_CST
14449 && tree_int_cst_lt (op1, index))
14450 end = middle;
14451 else if (TREE_CODE (index) == RANGE_EXPR
14452 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14453 begin = middle + 1;
14454 else if (TREE_CODE (index) == RANGE_EXPR
14455 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14456 end = middle;
14457 else
14458 return (*elts)[middle].value;
14462 return t;
14465 /* Return a VECTOR_CST if possible. */
14466 case CONSTRUCTOR:
14468 tree type = TREE_TYPE (t);
14469 if (TREE_CODE (type) != VECTOR_TYPE)
14470 return t;
14472 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14473 unsigned HOST_WIDE_INT idx, pos = 0;
14474 tree value;
14476 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14478 if (!CONSTANT_CLASS_P (value))
14479 return t;
14480 if (TREE_CODE (value) == VECTOR_CST)
14482 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14483 vec[pos++] = VECTOR_CST_ELT (value, i);
14485 else
14486 vec[pos++] = value;
14488 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14489 vec[pos] = build_zero_cst (TREE_TYPE (type));
14491 return build_vector (type, vec);
14494 case CONST_DECL:
14495 return fold (DECL_INITIAL (t));
14497 default:
14498 return t;
14499 } /* switch (code) */
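/* Editor's sketch, not in the original file: a caller can hand fold any
   freshly built expression and rely on getting back either a simplified
   tree or the input unchanged.  For example (types assumed compatible):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold (build2 (PLUS_EXPR, integer_type_node, two, three));

   SUM is now an INTEGER_CST with value 5.  New code generally prefers
   the fold_buildN_loc wrappers below, which fold at construction time
   and verify (under ENABLE_FOLD_CHECKING) that operands are unchanged.  */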
14502 #ifdef ENABLE_FOLD_CHECKING
14503 #undef fold
14505 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14506 hash_table <pointer_hash <tree_node> >);
14507 static void fold_check_failed (const_tree, const_tree);
14508 void print_fold_checksum (const_tree);
14510 /* With --enable-checking=fold, compute a digest of EXPR before
14511 and after the actual fold call to verify that fold did not
14512 accidentally change the original expression. */
14514 tree
14515 fold (tree expr)
14517 tree ret;
14518 struct md5_ctx ctx;
14519 unsigned char checksum_before[16], checksum_after[16];
14520 hash_table <pointer_hash <tree_node> > ht;
14522 ht.create (32);
14523 md5_init_ctx (&ctx);
14524 fold_checksum_tree (expr, &ctx, ht);
14525 md5_finish_ctx (&ctx, checksum_before);
14526 ht.empty ();
14528 ret = fold_1 (expr);
14530 md5_init_ctx (&ctx);
14531 fold_checksum_tree (expr, &ctx, ht);
14532 md5_finish_ctx (&ctx, checksum_after);
14533 ht.dispose ();
14535 if (memcmp (checksum_before, checksum_after, 16))
14536 fold_check_failed (expr, ret);
14538 return ret;
14541 void
14542 print_fold_checksum (const_tree expr)
14544 struct md5_ctx ctx;
14545 unsigned char checksum[16], cnt;
14546 hash_table <pointer_hash <tree_node> > ht;
14548 ht.create (32);
14549 md5_init_ctx (&ctx);
14550 fold_checksum_tree (expr, &ctx, ht);
14551 md5_finish_ctx (&ctx, checksum);
14552 ht.dispose ();
14553 for (cnt = 0; cnt < 16; ++cnt)
14554 fprintf (stderr, "%02x", checksum[cnt]);
14555 putc ('\n', stderr);
14558 static void
14559 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14561 internal_error ("fold check: original tree changed by fold");
14564 static void
14565 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14566 hash_table <pointer_hash <tree_node> > ht)
14568 tree_node **slot;
14569 enum tree_code code;
14570 union tree_node buf;
14571 int i, len;
14573 recursive_label:
14574 if (expr == NULL)
14575 return;
14576 slot = ht.find_slot (expr, INSERT);
14577 if (*slot != NULL)
14578 return;
14579 *slot = CONST_CAST_TREE (expr);
14580 code = TREE_CODE (expr);
14581 if (TREE_CODE_CLASS (code) == tcc_declaration
14582 && DECL_ASSEMBLER_NAME_SET_P (expr))
14584 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14585 memcpy ((char *) &buf, expr, tree_size (expr));
14586 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14587 expr = (tree) &buf;
14589 else if (TREE_CODE_CLASS (code) == tcc_type
14590 && (TYPE_POINTER_TO (expr)
14591 || TYPE_REFERENCE_TO (expr)
14592 || TYPE_CACHED_VALUES_P (expr)
14593 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14594 || TYPE_NEXT_VARIANT (expr)))
14596 /* Allow these fields to be modified. */
14597 tree tmp;
14598 memcpy ((char *) &buf, expr, tree_size (expr));
14599 expr = tmp = (tree) &buf;
14600 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14601 TYPE_POINTER_TO (tmp) = NULL;
14602 TYPE_REFERENCE_TO (tmp) = NULL;
14603 TYPE_NEXT_VARIANT (tmp) = NULL;
14604 if (TYPE_CACHED_VALUES_P (tmp))
14606 TYPE_CACHED_VALUES_P (tmp) = 0;
14607 TYPE_CACHED_VALUES (tmp) = NULL;
14610 md5_process_bytes (expr, tree_size (expr), ctx);
14611 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14612 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14613 if (TREE_CODE_CLASS (code) != tcc_type
14614 && TREE_CODE_CLASS (code) != tcc_declaration
14615 && code != TREE_LIST
14616 && code != SSA_NAME
14617 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14618 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14619 switch (TREE_CODE_CLASS (code))
14621 case tcc_constant:
14622 switch (code)
14624 case STRING_CST:
14625 md5_process_bytes (TREE_STRING_POINTER (expr),
14626 TREE_STRING_LENGTH (expr), ctx);
14627 break;
14628 case COMPLEX_CST:
14629 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14630 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14631 break;
14632 case VECTOR_CST:
14633 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14634 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14635 break;
14636 default:
14637 break;
14639 break;
14640 case tcc_exceptional:
14641 switch (code)
14643 case TREE_LIST:
14644 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14645 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14646 expr = TREE_CHAIN (expr);
14647 goto recursive_label;
14648 break;
14649 case TREE_VEC:
14650 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14651 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14652 break;
14653 default:
14654 break;
14656 break;
14657 case tcc_expression:
14658 case tcc_reference:
14659 case tcc_comparison:
14660 case tcc_unary:
14661 case tcc_binary:
14662 case tcc_statement:
14663 case tcc_vl_exp:
14664 len = TREE_OPERAND_LENGTH (expr);
14665 for (i = 0; i < len; ++i)
14666 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14667 break;
14668 case tcc_declaration:
14669 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14670 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14671 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14673 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14674 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14675 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14676 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14677 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14679 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14680 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14682 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14684 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14685 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14686 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14688 break;
14689 case tcc_type:
14690 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14691 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14692 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14693 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14694 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14695 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14696 if (INTEGRAL_TYPE_P (expr)
14697 || SCALAR_FLOAT_TYPE_P (expr))
14699 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14700 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14702 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14703 if (TREE_CODE (expr) == RECORD_TYPE
14704 || TREE_CODE (expr) == UNION_TYPE
14705 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14706 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14707 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14708 break;
14709 default:
14710 break;
14714 /* Helper function for outputting the checksum of a tree T. When
14715 debugging with gdb, you can "define mynext" to be "next" followed
14716 by "call debug_fold_checksum (op0)", then just trace down till the
14717 outputs differ. */
14719 DEBUG_FUNCTION void
14720 debug_fold_checksum (const_tree t)
14722 int i;
14723 unsigned char checksum[16];
14724 struct md5_ctx ctx;
14725 hash_table <pointer_hash <tree_node> > ht;
14726 ht.create (32);
14728 md5_init_ctx (&ctx);
14729 fold_checksum_tree (t, &ctx, ht);
14730 md5_finish_ctx (&ctx, checksum);
14731 ht.empty ();
14733 for (i = 0; i < 16; i++)
14734 fprintf (stderr, "%d ", checksum[i]);
14736 fprintf (stderr, "\n");
14739 #endif
14741 /* Fold a unary tree expression with code CODE of type TYPE with an
14742 operand OP0. LOC is the location of the resulting expression.
14743 Return a folded expression if successful. Otherwise, return a tree
14744 expression with code CODE of type TYPE with an operand OP0. */
14746 tree
14747 fold_build1_stat_loc (location_t loc,
14748 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14750 tree tem;
14751 #ifdef ENABLE_FOLD_CHECKING
14752 unsigned char checksum_before[16], checksum_after[16];
14753 struct md5_ctx ctx;
14754 hash_table <pointer_hash <tree_node> > ht;
14756 ht.create (32);
14757 md5_init_ctx (&ctx);
14758 fold_checksum_tree (op0, &ctx, ht);
14759 md5_finish_ctx (&ctx, checksum_before);
14760 ht.empty ();
14761 #endif
14763 tem = fold_unary_loc (loc, code, type, op0);
14764 if (!tem)
14765 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14767 #ifdef ENABLE_FOLD_CHECKING
14768 md5_init_ctx (&ctx);
14769 fold_checksum_tree (op0, &ctx, ht);
14770 md5_finish_ctx (&ctx, checksum_after);
14771 ht.dispose ();
14773 if (memcmp (checksum_before, checksum_after, 16))
14774 fold_check_failed (op0, tem);
14775 #endif
14776 return tem;
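/* Editor's example, not part of the source: the unary wrapper folds at
   construction time, so

     tree m5 = fold_build1_loc (UNKNOWN_LOCATION, NEGATE_EXPR,
                                integer_type_node,
                                build_int_cst (integer_type_node, 5));

   yields an INTEGER_CST of value -5 rather than a NEGATE_EXPR node.  */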
14779 /* Fold a binary tree expression with code CODE of type TYPE with
14780 operands OP0 and OP1. LOC is the location of the resulting
14781 expression. Return a folded expression if successful. Otherwise,
14782 return a tree expression with code CODE of type TYPE with operands
14783 OP0 and OP1. */
14785 tree
14786 fold_build2_stat_loc (location_t loc,
14787 enum tree_code code, tree type, tree op0, tree op1
14788 MEM_STAT_DECL)
14790 tree tem;
14791 #ifdef ENABLE_FOLD_CHECKING
14792 unsigned char checksum_before_op0[16],
14793 checksum_before_op1[16],
14794 checksum_after_op0[16],
14795 checksum_after_op1[16];
14796 struct md5_ctx ctx;
14797 hash_table <pointer_hash <tree_node> > ht;
14799 ht.create (32);
14800 md5_init_ctx (&ctx);
14801 fold_checksum_tree (op0, &ctx, ht);
14802 md5_finish_ctx (&ctx, checksum_before_op0);
14803 ht.empty ();
14805 md5_init_ctx (&ctx);
14806 fold_checksum_tree (op1, &ctx, ht);
14807 md5_finish_ctx (&ctx, checksum_before_op1);
14808 ht.empty ();
14809 #endif
14811 tem = fold_binary_loc (loc, code, type, op0, op1);
14812 if (!tem)
14813 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14815 #ifdef ENABLE_FOLD_CHECKING
14816 md5_init_ctx (&ctx);
14817 fold_checksum_tree (op0, &ctx, ht);
14818 md5_finish_ctx (&ctx, checksum_after_op0);
14819 ht.empty ();
14821 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14822 fold_check_failed (op0, tem);
14824 md5_init_ctx (&ctx);
14825 fold_checksum_tree (op1, &ctx, ht);
14826 md5_finish_ctx (&ctx, checksum_after_op1);
14827 ht.dispose ();
14829 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14830 fold_check_failed (op1, tem);
14831 #endif
14832 return tem;
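/* Editor's sketch, assuming an operand X of type TYPE built elsewhere:
   the binary wrapper is the usual way to create arithmetic that may
   simplify away entirely, e.g.

     tree t = fold_build2_loc (loc, MULT_EXPR, type, x,
                               build_one_cst (type));

   returns X itself via the x*1 => x simplification instead of
   allocating a MULT_EXPR node.  */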
14835 /* Fold a ternary tree expression with code CODE of type TYPE with
14836 operands OP0, OP1, and OP2. Return a folded expression if
14837 successful. Otherwise, return a tree expression with code CODE of
14838 type TYPE with operands OP0, OP1, and OP2. */
14840 tree
14841 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14842 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14844 tree tem;
14845 #ifdef ENABLE_FOLD_CHECKING
14846 unsigned char checksum_before_op0[16],
14847 checksum_before_op1[16],
14848 checksum_before_op2[16],
14849 checksum_after_op0[16],
14850 checksum_after_op1[16],
14851 checksum_after_op2[16];
14852 struct md5_ctx ctx;
14853 hash_table <pointer_hash <tree_node> > ht;
14855 ht.create (32);
14856 md5_init_ctx (&ctx);
14857 fold_checksum_tree (op0, &ctx, ht);
14858 md5_finish_ctx (&ctx, checksum_before_op0);
14859 ht.empty ();
14861 md5_init_ctx (&ctx);
14862 fold_checksum_tree (op1, &ctx, ht);
14863 md5_finish_ctx (&ctx, checksum_before_op1);
14864 ht.empty ();
14866 md5_init_ctx (&ctx);
14867 fold_checksum_tree (op2, &ctx, ht);
14868 md5_finish_ctx (&ctx, checksum_before_op2);
14869 ht.empty ();
14870 #endif
14872 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14873 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14874 if (!tem)
14875 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14877 #ifdef ENABLE_FOLD_CHECKING
14878 md5_init_ctx (&ctx);
14879 fold_checksum_tree (op0, &ctx, ht);
14880 md5_finish_ctx (&ctx, checksum_after_op0);
14881 ht.empty ();
14883 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14884 fold_check_failed (op0, tem);
14886 md5_init_ctx (&ctx);
14887 fold_checksum_tree (op1, &ctx, ht);
14888 md5_finish_ctx (&ctx, checksum_after_op1);
14889 ht.empty ();
14891 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14892 fold_check_failed (op1, tem);
14894 md5_init_ctx (&ctx);
14895 fold_checksum_tree (op2, &ctx, ht);
14896 md5_finish_ctx (&ctx, checksum_after_op2);
14897 ht.dispose ();
14899 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14900 fold_check_failed (op2, tem);
14901 #endif
14902 return tem;
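/* Editor's illustration, not in the original: a constant condition
   folds immediately through the COND_EXPR case of fold_ternary_loc,
   so (with OP_A and OP_B both of non-void type TYPE)

     tree t = fold_build3_loc (loc, COND_EXPR, type,
                               integer_one_node, op_a, op_b);

   simply returns OP_A.  */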
14905 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14906 arguments in ARGARRAY, and a null static chain.
14907 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14908 of type TYPE from the given operands as constructed by build_call_array. */
14910 tree
14911 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14912 int nargs, tree *argarray)
14914 tree tem;
14915 #ifdef ENABLE_FOLD_CHECKING
14916 unsigned char checksum_before_fn[16],
14917 checksum_before_arglist[16],
14918 checksum_after_fn[16],
14919 checksum_after_arglist[16];
14920 struct md5_ctx ctx;
14921 hash_table <pointer_hash <tree_node> > ht;
14922 int i;
14924 ht.create (32);
14925 md5_init_ctx (&ctx);
14926 fold_checksum_tree (fn, &ctx, ht);
14927 md5_finish_ctx (&ctx, checksum_before_fn);
14928 ht.empty ();
14930 md5_init_ctx (&ctx);
14931 for (i = 0; i < nargs; i++)
14932 fold_checksum_tree (argarray[i], &ctx, ht);
14933 md5_finish_ctx (&ctx, checksum_before_arglist);
14934 ht.empty ();
14935 #endif
14937 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14939 #ifdef ENABLE_FOLD_CHECKING
14940 md5_init_ctx (&ctx);
14941 fold_checksum_tree (fn, &ctx, ht);
14942 md5_finish_ctx (&ctx, checksum_after_fn);
14943 ht.empty ();
14945 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14946 fold_check_failed (fn, tem);
14948 md5_init_ctx (&ctx);
14949 for (i = 0; i < nargs; i++)
14950 fold_checksum_tree (argarray[i], &ctx, ht);
14951 md5_finish_ctx (&ctx, checksum_after_arglist);
14952 ht.dispose ();
14954 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14955 fold_check_failed (NULL_TREE, tem);
14956 #endif
14957 return tem;
14960 /* Perform constant folding and related simplification of initializer
14961 expressions. The following behave identically to "fold_buildN" but
14962 ignore potential run-time traps and exceptions that fold must preserve. */
14964 #define START_FOLD_INIT \
14965 int saved_signaling_nans = flag_signaling_nans;\
14966 int saved_trapping_math = flag_trapping_math;\
14967 int saved_rounding_math = flag_rounding_math;\
14968 int saved_trapv = flag_trapv;\
14969 int saved_folding_initializer = folding_initializer;\
14970 flag_signaling_nans = 0;\
14971 flag_trapping_math = 0;\
14972 flag_rounding_math = 0;\
14973 flag_trapv = 0;\
14974 folding_initializer = 1;
14976 #define END_FOLD_INIT \
14977 flag_signaling_nans = saved_signaling_nans;\
14978 flag_trapping_math = saved_trapping_math;\
14979 flag_rounding_math = saved_rounding_math;\
14980 flag_trapv = saved_trapv;\
14981 folding_initializer = saved_folding_initializer;
14983 tree
14984 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14985 tree type, tree op)
14987 tree result;
14988 START_FOLD_INIT;
14990 result = fold_build1_loc (loc, code, type, op);
14992 END_FOLD_INIT;
14993 return result;
14996 tree
14997 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14998 tree type, tree op0, tree op1)
15000 tree result;
15001 START_FOLD_INIT;
15003 result = fold_build2_loc (loc, code, type, op0, op1);
15005 END_FOLD_INIT;
15006 return result;
15009 tree
15010 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15011 tree type, tree op0, tree op1, tree op2)
15013 tree result;
15014 START_FOLD_INIT;
15016 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15018 END_FOLD_INIT;
15019 return result;
15022 tree
15023 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15024 int nargs, tree *argarray)
15026 tree result;
15027 START_FOLD_INIT;
15029 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15031 END_FOLD_INIT;
15032 return result;
15035 #undef START_FOLD_INIT
15036 #undef END_FOLD_INIT
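/* Editor's illustration of the initializer variants above, with
   hypothetical REAL_CST operands NUM and DEN supplied by a front end:

     tree elt = fold_build2_initializer_loc (input_location, RDIV_EXPR,
                                             double_type_node, num, den);

   Because the flags are temporarily cleared, the division folds to a
   constant even under -frounding-math or -ftrapping-math, which would
   otherwise force fold to keep the runtime operation.  */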
15038 /* Determine if the first argument is a multiple of the second argument.
15039 Return 0 if it is not, or if we cannot easily determine it to be.
15041 An example of the sort of thing we care about (at this point; this routine
15042 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15043 fold cases do now) is discovering that
15045 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15047 is a multiple of
15049 SAVE_EXPR (J * 8)
15051 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15053 This code also handles discovering that
15055 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15057 is a multiple of 8, so we don't have to worry about a possible
15058 remainder.
15060 Note that we *look* inside a SAVE_EXPR only to determine how it was
15061 calculated; it is not safe for fold to do much of anything else with the
15062 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15063 at run time. For example, the latter example above *cannot* be implemented
15064 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15065 evaluation time of the original SAVE_EXPR is not necessarily the same at
15066 the time the new expression is evaluated. The only optimization of this
15067 sort that would be valid is changing
15069 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15071 divided by 8 to
15073 SAVE_EXPR (I) * SAVE_EXPR (J)
15075 (where the same SAVE_EXPR (J) is used in the original and the
15076 transformed version). */
15078 int
15079 multiple_of_p (tree type, const_tree top, const_tree bottom)
15081 if (operand_equal_p (top, bottom, 0))
15082 return 1;
15084 if (TREE_CODE (type) != INTEGER_TYPE)
15085 return 0;
15087 switch (TREE_CODE (top))
15089 case BIT_AND_EXPR:
15090 /* A bitwise AND preserves multiples of a power of two: if the mask
15091 is a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15092 if (!integer_pow2p (bottom))
15093 return 0;
15094 /* FALLTHRU */
15096 case MULT_EXPR:
15097 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15098 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15100 case PLUS_EXPR:
15101 case MINUS_EXPR:
15102 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15103 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15105 case LSHIFT_EXPR:
15106 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15108 tree op1, t1;
15110 op1 = TREE_OPERAND (top, 1);
15111 /* const_binop may not detect overflow correctly,
15112 so check for it explicitly here. */
15113 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15114 > TREE_INT_CST_LOW (op1)
15115 && TREE_INT_CST_HIGH (op1) == 0
15116 && 0 != (t1 = fold_convert (type,
15117 const_binop (LSHIFT_EXPR,
15118 size_one_node,
15119 op1)))
15120 && !TREE_OVERFLOW (t1))
15121 return multiple_of_p (type, t1, bottom);
15123 return 0;
15125 case NOP_EXPR:
15126 /* Can't handle conversions from non-integral or wider integral type. */
15127 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15128 || (TYPE_PRECISION (type)
15129 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15130 return 0;
15132 /* ... fall through ... */
15134 case SAVE_EXPR:
15135 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15137 case COND_EXPR:
15138 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15139 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15141 case INTEGER_CST:
15142 if (TREE_CODE (bottom) != INTEGER_CST
15143 || integer_zerop (bottom)
15144 || (TYPE_UNSIGNED (type)
15145 && (tree_int_cst_sgn (top) < 0
15146 || tree_int_cst_sgn (bottom) < 0)))
15147 return 0;
15148 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15149 top, bottom));
15151 default:
15152 return 0;
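/* Editor's worked example, not in the source: with the SAVE_EXPR trees
   from the comment above, a call such as

     multiple_of_p (sizetype, prod, size_int (8))

   where PROD is MULT_EXPR <SAVE_EXPR (I), SAVE_EXPR (J * 8)>, returns 1:
   the MULT_EXPR case succeeds if either factor is a multiple, the
   SAVE_EXPR case looks through to J * 8, and the INTEGER_CST case
   finally checks that 8 % 8 == 0.  */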
15156 /* Return true if CODE or TYPE is known to be non-negative. */
15158 static bool
15159 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15161 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15162 && truth_value_p (code))
15163 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15164 have a signed:1 type (where the values are -1 and 0). */
15165 return true;
15166 return false;
15169 /* Return true if (CODE OP0) is known to be non-negative. If the return
15170 value is based on the assumption that signed overflow is undefined,
15171 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15172 *STRICT_OVERFLOW_P. */
15174 bool
15175 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15176 bool *strict_overflow_p)
15178 if (TYPE_UNSIGNED (type))
15179 return true;
15181 switch (code)
15183 case ABS_EXPR:
15184 /* We can't return 1 if flag_wrapv is set because
15185 ABS_EXPR<INT_MIN> = INT_MIN. */
15186 if (!INTEGRAL_TYPE_P (type))
15187 return true;
15188 if (TYPE_OVERFLOW_UNDEFINED (type))
15190 *strict_overflow_p = true;
15191 return true;
15193 break;
15195 case NON_LVALUE_EXPR:
15196 case FLOAT_EXPR:
15197 case FIX_TRUNC_EXPR:
15198 return tree_expr_nonnegative_warnv_p (op0,
15199 strict_overflow_p);
15201 case NOP_EXPR:
15203 tree inner_type = TREE_TYPE (op0);
15204 tree outer_type = type;
15206 if (TREE_CODE (outer_type) == REAL_TYPE)
15208 if (TREE_CODE (inner_type) == REAL_TYPE)
15209 return tree_expr_nonnegative_warnv_p (op0,
15210 strict_overflow_p);
15211 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15213 if (TYPE_UNSIGNED (inner_type))
15214 return true;
15215 return tree_expr_nonnegative_warnv_p (op0,
15216 strict_overflow_p);
15219 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15221 if (TREE_CODE (inner_type) == REAL_TYPE)
15222 return tree_expr_nonnegative_warnv_p (op0,
15223 strict_overflow_p);
15224 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15225 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15226 && TYPE_UNSIGNED (inner_type);
15229 break;
15231 default:
15232 return tree_simple_nonnegative_warnv_p (code, type);
15235 /* We don't know the sign of `t', so be conservative and return false. */
15236 return false;
15239 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15240 value is based on the assumption that signed overflow is undefined,
15241 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15242 *STRICT_OVERFLOW_P. */
15244 bool
15245 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15246 tree op1, bool *strict_overflow_p)
15248 if (TYPE_UNSIGNED (type))
15249 return true;
15251 switch (code)
15253 case POINTER_PLUS_EXPR:
15254 case PLUS_EXPR:
15255 if (FLOAT_TYPE_P (type))
15256 return (tree_expr_nonnegative_warnv_p (op0,
15257 strict_overflow_p)
15258 && tree_expr_nonnegative_warnv_p (op1,
15259 strict_overflow_p));
15261 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15262 both unsigned and at least 2 bits shorter than the result. */
15263 if (TREE_CODE (type) == INTEGER_TYPE
15264 && TREE_CODE (op0) == NOP_EXPR
15265 && TREE_CODE (op1) == NOP_EXPR)
15267 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15268 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15269 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15270 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15272 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15273 TYPE_PRECISION (inner2)) + 1;
15274 return prec < TYPE_PRECISION (type);
15277 break;
15279 case MULT_EXPR:
15280 if (FLOAT_TYPE_P (type))
15282 /* x * x for floating point x is always non-negative. */
15283 if (operand_equal_p (op0, op1, 0))
15284 return true;
15285 return (tree_expr_nonnegative_warnv_p (op0,
15286 strict_overflow_p)
15287 && tree_expr_nonnegative_warnv_p (op1,
15288 strict_overflow_p));
15291 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15292 both unsigned and their combined precision is less than the result's. */
15293 if (TREE_CODE (type) == INTEGER_TYPE
15294 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15295 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15297 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15298 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15299 : TREE_TYPE (op0);
15300 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15301 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15302 : TREE_TYPE (op1);
15304 bool unsigned0 = TYPE_UNSIGNED (inner0);
15305 bool unsigned1 = TYPE_UNSIGNED (inner1);
15307 if (TREE_CODE (op0) == INTEGER_CST)
15308 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15310 if (TREE_CODE (op1) == INTEGER_CST)
15311 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15313 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15314 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15316 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15317 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15318 : TYPE_PRECISION (inner0);
15320 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15321 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15322 : TYPE_PRECISION (inner1);
15324 return precision0 + precision1 < TYPE_PRECISION (type);
15327 return false;
15329 case BIT_AND_EXPR:
15330 case MAX_EXPR:
15331 return (tree_expr_nonnegative_warnv_p (op0,
15332 strict_overflow_p)
15333 || tree_expr_nonnegative_warnv_p (op1,
15334 strict_overflow_p));
15336 case BIT_IOR_EXPR:
15337 case BIT_XOR_EXPR:
15338 case MIN_EXPR:
15339 case RDIV_EXPR:
15340 case TRUNC_DIV_EXPR:
15341 case CEIL_DIV_EXPR:
15342 case FLOOR_DIV_EXPR:
15343 case ROUND_DIV_EXPR:
15344 return (tree_expr_nonnegative_warnv_p (op0,
15345 strict_overflow_p)
15346 && tree_expr_nonnegative_warnv_p (op1,
15347 strict_overflow_p));
15349 case TRUNC_MOD_EXPR:
15350 case CEIL_MOD_EXPR:
15351 case FLOOR_MOD_EXPR:
15352 case ROUND_MOD_EXPR:
15353 return tree_expr_nonnegative_warnv_p (op0,
15354 strict_overflow_p);
15355 default:
15356 return tree_simple_nonnegative_warnv_p (code, type);
15359 /* We don't know the sign of `t', so be conservative and return false. */
15360 return false;
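/* Editor's worked example for the PLUS_EXPR precision test above:
   adding two zero-extended 8-bit values in a 32-bit int gives
   prec = MAX (8, 8) + 1 = 9, and 9 < 32, so the sum stays well below
   the sign bit and is known non-negative; with two 31-bit operands
   prec = 32 is not < 32 and the test correctly declines to answer.  */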
15363 /* Return true if T is known to be non-negative. If the return
15364 value is based on the assumption that signed overflow is undefined,
15365 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15366 *STRICT_OVERFLOW_P. */
15368 bool
15369 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15371 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15372 return true;
15374 switch (TREE_CODE (t))
15376 case INTEGER_CST:
15377 return tree_int_cst_sgn (t) >= 0;
15379 case REAL_CST:
15380 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15382 case FIXED_CST:
15383 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15385 case COND_EXPR:
15386 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15387 strict_overflow_p)
15388 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15389 strict_overflow_p));
15390 default:
15391 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15392 TREE_TYPE (t));
15394 /* We don't know the sign of `t', so be conservative and return false. */
15395 return false;
15398 /* Return true if T is known to be non-negative. If the return
15399 value is based on the assumption that signed overflow is undefined,
15400 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15401 *STRICT_OVERFLOW_P. */
15403 bool
15404 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15405 tree arg0, tree arg1, bool *strict_overflow_p)
15407 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15408 switch (DECL_FUNCTION_CODE (fndecl))
15410 CASE_FLT_FN (BUILT_IN_ACOS):
15411 CASE_FLT_FN (BUILT_IN_ACOSH):
15412 CASE_FLT_FN (BUILT_IN_CABS):
15413 CASE_FLT_FN (BUILT_IN_COSH):
15414 CASE_FLT_FN (BUILT_IN_ERFC):
15415 CASE_FLT_FN (BUILT_IN_EXP):
15416 CASE_FLT_FN (BUILT_IN_EXP10):
15417 CASE_FLT_FN (BUILT_IN_EXP2):
15418 CASE_FLT_FN (BUILT_IN_FABS):
15419 CASE_FLT_FN (BUILT_IN_FDIM):
15420 CASE_FLT_FN (BUILT_IN_HYPOT):
15421 CASE_FLT_FN (BUILT_IN_POW10):
15422 CASE_INT_FN (BUILT_IN_FFS):
15423 CASE_INT_FN (BUILT_IN_PARITY):
15424 CASE_INT_FN (BUILT_IN_POPCOUNT):
15425 case BUILT_IN_BSWAP32:
15426 case BUILT_IN_BSWAP64:
15427 /* Always true. */
15428 return true;
15430 CASE_FLT_FN (BUILT_IN_SQRT):
15431 /* sqrt(-0.0) is -0.0. */
15432 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15433 return true;
15434 return tree_expr_nonnegative_warnv_p (arg0,
15435 strict_overflow_p);
15437 CASE_FLT_FN (BUILT_IN_ASINH):
15438 CASE_FLT_FN (BUILT_IN_ATAN):
15439 CASE_FLT_FN (BUILT_IN_ATANH):
15440 CASE_FLT_FN (BUILT_IN_CBRT):
15441 CASE_FLT_FN (BUILT_IN_CEIL):
15442 CASE_FLT_FN (BUILT_IN_ERF):
15443 CASE_FLT_FN (BUILT_IN_EXPM1):
15444 CASE_FLT_FN (BUILT_IN_FLOOR):
15445 CASE_FLT_FN (BUILT_IN_FMOD):
15446 CASE_FLT_FN (BUILT_IN_FREXP):
15447 CASE_FLT_FN (BUILT_IN_ICEIL):
15448 CASE_FLT_FN (BUILT_IN_IFLOOR):
15449 CASE_FLT_FN (BUILT_IN_IRINT):
15450 CASE_FLT_FN (BUILT_IN_IROUND):
15451 CASE_FLT_FN (BUILT_IN_LCEIL):
15452 CASE_FLT_FN (BUILT_IN_LDEXP):
15453 CASE_FLT_FN (BUILT_IN_LFLOOR):
15454 CASE_FLT_FN (BUILT_IN_LLCEIL):
15455 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15456 CASE_FLT_FN (BUILT_IN_LLRINT):
15457 CASE_FLT_FN (BUILT_IN_LLROUND):
15458 CASE_FLT_FN (BUILT_IN_LRINT):
15459 CASE_FLT_FN (BUILT_IN_LROUND):
15460 CASE_FLT_FN (BUILT_IN_MODF):
15461 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15462 CASE_FLT_FN (BUILT_IN_RINT):
15463 CASE_FLT_FN (BUILT_IN_ROUND):
15464 CASE_FLT_FN (BUILT_IN_SCALB):
15465 CASE_FLT_FN (BUILT_IN_SCALBLN):
15466 CASE_FLT_FN (BUILT_IN_SCALBN):
15467 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15468 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15469 CASE_FLT_FN (BUILT_IN_SINH):
15470 CASE_FLT_FN (BUILT_IN_TANH):
15471 CASE_FLT_FN (BUILT_IN_TRUNC):
15472 /* True if the 1st argument is nonnegative. */
15473 return tree_expr_nonnegative_warnv_p (arg0,
15474 strict_overflow_p);
15476 CASE_FLT_FN (BUILT_IN_FMAX):
15477 /* True if the 1st OR 2nd arguments are nonnegative. */
15478 return (tree_expr_nonnegative_warnv_p (arg0,
15479 strict_overflow_p)
15480 || (tree_expr_nonnegative_warnv_p (arg1,
15481 strict_overflow_p)));
15483 CASE_FLT_FN (BUILT_IN_FMIN):
15484 /* True if the 1st AND 2nd arguments are nonnegative. */
15485 return (tree_expr_nonnegative_warnv_p (arg0,
15486 strict_overflow_p)
15487 && (tree_expr_nonnegative_warnv_p (arg1,
15488 strict_overflow_p)));
15490 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15491 /* True if the 2nd argument is nonnegative. */
15492 return tree_expr_nonnegative_warnv_p (arg1,
15493 strict_overflow_p);
15495 CASE_FLT_FN (BUILT_IN_POWI):
15496 /* True if the 1st argument is nonnegative or the second
15497 argument is an even integer. */
15498 if (TREE_CODE (arg1) == INTEGER_CST
15499 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15500 return true;
15501 return tree_expr_nonnegative_warnv_p (arg0,
15502 strict_overflow_p);
15504 CASE_FLT_FN (BUILT_IN_POW):
15505 /* True if the 1st argument is nonnegative or the second
15506 argument is an even integer-valued real. */
15507 if (TREE_CODE (arg1) == REAL_CST)
15509 REAL_VALUE_TYPE c;
15510 HOST_WIDE_INT n;
15512 c = TREE_REAL_CST (arg1);
15513 n = real_to_integer (&c);
15514 if ((n & 1) == 0)
15516 REAL_VALUE_TYPE cint;
15517 real_from_integer (&cint, VOIDmode, n,
15518 n < 0 ? -1 : 0, 0);
15519 if (real_identical (&c, &cint))
15520 return true;
15523 return tree_expr_nonnegative_warnv_p (arg0,
15524 strict_overflow_p);
15526 default:
15527 break;
15529 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15530 type);
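/* Editor's worked example, not in the source: for pow (x, 2.0) the
   REAL_CST case above converts 2.0 back to the integer 2, sees that it
   is even, confirms via real_identical that 2.0 is exactly that integer
   (so pow (x, 2.5) is not misclassified), and therefore answers true no
   matter what is known about X.  */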
15533 /* Return true if T is known to be non-negative. If the return
15534 value is based on the assumption that signed overflow is undefined,
15535 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15536 *STRICT_OVERFLOW_P. */
15538 bool
15539 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15541 enum tree_code code = TREE_CODE (t);
15542 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15543 return true;
15545 switch (code)
15547 case TARGET_EXPR:
15549 tree temp = TARGET_EXPR_SLOT (t);
15550 t = TARGET_EXPR_INITIAL (t);
15552 /* If the initializer is non-void, then it's a normal expression
15553 that will be assigned to the slot. */
15554 if (!VOID_TYPE_P (t))
15555 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15557 /* Otherwise, the initializer sets the slot in some way. One common
15558 way is an assignment statement at the end of the initializer. */
15559 while (1)
15561 if (TREE_CODE (t) == BIND_EXPR)
15562 t = expr_last (BIND_EXPR_BODY (t));
15563 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15564 || TREE_CODE (t) == TRY_CATCH_EXPR)
15565 t = expr_last (TREE_OPERAND (t, 0));
15566 else if (TREE_CODE (t) == STATEMENT_LIST)
15567 t = expr_last (t);
15568 else
15569 break;
15571 if (TREE_CODE (t) == MODIFY_EXPR
15572 && TREE_OPERAND (t, 0) == temp)
15573 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15574 strict_overflow_p);
15576 return false;
15579 case CALL_EXPR:
15581 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15582 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15584 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15585 get_callee_fndecl (t),
15586 arg0,
15587 arg1,
15588 strict_overflow_p);
15590 case COMPOUND_EXPR:
15591 case MODIFY_EXPR:
15592 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15593 strict_overflow_p);
15594 case BIND_EXPR:
15595 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15596 strict_overflow_p);
15597 case SAVE_EXPR:
15598 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15599 strict_overflow_p);
15601 default:
15602 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15603 TREE_TYPE (t));
15606 /* We don't know the sign of `t', so be conservative and return false. */
15607 return false;
15610 /* Return true if T is known to be non-negative. If the return
15611 value is based on the assumption that signed overflow is undefined,
15612 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15613 *STRICT_OVERFLOW_P. */
15615 bool
15616 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15618 enum tree_code code;
15619 if (t == error_mark_node)
15620 return false;
15622 code = TREE_CODE (t);
15623 switch (TREE_CODE_CLASS (code))
15625 case tcc_binary:
15626 case tcc_comparison:
15627 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15628 TREE_TYPE (t),
15629 TREE_OPERAND (t, 0),
15630 TREE_OPERAND (t, 1),
15631 strict_overflow_p);
15633 case tcc_unary:
15634 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15635 TREE_TYPE (t),
15636 TREE_OPERAND (t, 0),
15637 strict_overflow_p);
15639 case tcc_constant:
15640 case tcc_declaration:
15641 case tcc_reference:
15642 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15644 default:
15645 break;
15648 switch (code)
15650 case TRUTH_AND_EXPR:
15651 case TRUTH_OR_EXPR:
15652 case TRUTH_XOR_EXPR:
15653 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15654 TREE_TYPE (t),
15655 TREE_OPERAND (t, 0),
15656 TREE_OPERAND (t, 1),
15657 strict_overflow_p);
15658 case TRUTH_NOT_EXPR:
15659 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15660 TREE_TYPE (t),
15661 TREE_OPERAND (t, 0),
15662 strict_overflow_p);
15664 case COND_EXPR:
15665 case CONSTRUCTOR:
15666 case OBJ_TYPE_REF:
15667 case ASSERT_EXPR:
15668 case ADDR_EXPR:
15669 case WITH_SIZE_EXPR:
15670 case SSA_NAME:
15671 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15673 default:
15674 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15678 /* Return true if `t' is known to be non-negative. Handle warnings
15679 about undefined signed overflow. */
15681 bool
15682 tree_expr_nonnegative_p (tree t)
15684 bool ret, strict_overflow_p;
15686 strict_overflow_p = false;
15687 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15688 if (strict_overflow_p)
15689 fold_overflow_warning (("assuming signed overflow does not occur when "
15690 "determining that expression is always "
15691 "non-negative"),
15692 WARN_STRICT_OVERFLOW_MISC);
15693 return ret;
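/* Editor's sketch of a typical caller, with ARG, LOC and TYPE assumed
   to come from a builtin-folding context:

     if (tree_expr_nonnegative_p (arg))
       return fold_convert_loc (loc, type, arg);   (fabs (x) -> x)

   Any strict-overflow assumption made while answering is funneled
   through fold_overflow_warning above before the result is used.  */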
15697 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15698 For floating point we further ensure that T is not denormal.
15699 Similar logic is present in nonzero_address in rtlanal.h.
15701 If the return value is based on the assumption that signed overflow
15702 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15703 change *STRICT_OVERFLOW_P. */
15705 bool
15706 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15707 bool *strict_overflow_p)
15709 switch (code)
15711 case ABS_EXPR:
15712 return tree_expr_nonzero_warnv_p (op0,
15713 strict_overflow_p);
15715 case NOP_EXPR:
15717 tree inner_type = TREE_TYPE (op0);
15718 tree outer_type = type;
15720 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15721 && tree_expr_nonzero_warnv_p (op0,
15722 strict_overflow_p));
15724 break;
15726 case NON_LVALUE_EXPR:
15727 return tree_expr_nonzero_warnv_p (op0,
15728 strict_overflow_p);
15730 default:
15731 break;
15734 return false;
15737 /* Return true when the binary expression (CODE OP0 OP1) is known to be
15738 nonzero; only integral and pointer types are handled.
15739 Similar logic is present in nonzero_address_p in rtlanal.c.
15741 If the return value is based on the assumption that signed overflow
15742 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15743 change *STRICT_OVERFLOW_P. */
15745 bool
15746 tree_binary_nonzero_warnv_p (enum tree_code code,
15747 tree type,
15748 tree op0,
15749 tree op1, bool *strict_overflow_p)
15751 bool sub_strict_overflow_p;
15752 switch (code)
15754 case POINTER_PLUS_EXPR:
15755 case PLUS_EXPR:
15756 if (TYPE_OVERFLOW_UNDEFINED (type))
15758 /* In the presence of negative values it is hard to say
15759 anything definite. */
15760 sub_strict_overflow_p = false;
15761 if (!tree_expr_nonnegative_warnv_p (op0,
15762 &sub_strict_overflow_p)
15763 || !tree_expr_nonnegative_warnv_p (op1,
15764 &sub_strict_overflow_p))
15765 return false;
15766 /* One of the operands must be positive and the other non-negative. */
15767 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15768 overflows, on a two's-complement machine the sum of two
15769 nonnegative numbers can never be zero. */
15770 return (tree_expr_nonzero_warnv_p (op0,
15771 strict_overflow_p)
15772 || tree_expr_nonzero_warnv_p (op1,
15773 strict_overflow_p));
15775 break;
15777 case MULT_EXPR:
15778 if (TYPE_OVERFLOW_UNDEFINED (type))
15780 if (tree_expr_nonzero_warnv_p (op0,
15781 strict_overflow_p)
15782 && tree_expr_nonzero_warnv_p (op1,
15783 strict_overflow_p))
15785 *strict_overflow_p = true;
15786 return true;
15789 break;
15791 case MIN_EXPR:
15792 sub_strict_overflow_p = false;
15793 if (tree_expr_nonzero_warnv_p (op0,
15794 &sub_strict_overflow_p)
15795 && tree_expr_nonzero_warnv_p (op1,
15796 &sub_strict_overflow_p))
15798 if (sub_strict_overflow_p)
15799 *strict_overflow_p = true;
15801 break;
15803 case MAX_EXPR:
15804 sub_strict_overflow_p = false;
15805 if (tree_expr_nonzero_warnv_p (op0,
15806 &sub_strict_overflow_p))
15808 if (sub_strict_overflow_p)
15809 *strict_overflow_p = true;
15811 /* When both operands are nonzero, MAX must be too. */
15812 if (tree_expr_nonzero_warnv_p (op1,
15813 strict_overflow_p))
15814 return true;
15816 /* MAX where operand 0 is positive is positive. */
15817 return tree_expr_nonnegative_warnv_p (op0,
15818 strict_overflow_p);
15820 /* MAX where operand 1 is positive is positive. */
15821 else if (tree_expr_nonzero_warnv_p (op1,
15822 &sub_strict_overflow_p)
15823 && tree_expr_nonnegative_warnv_p (op1,
15824 &sub_strict_overflow_p))
15826 if (sub_strict_overflow_p)
15827 *strict_overflow_p = true;
15828 return true;
15830 break;
15832 case BIT_IOR_EXPR:
15833 return (tree_expr_nonzero_warnv_p (op1,
15834 strict_overflow_p)
15835 || tree_expr_nonzero_warnv_p (op0,
15836 strict_overflow_p));
15838 default:
15839 break;
15842 return false;
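/* Editorial note on the MULT_EXPR case above: with wrapping semantics
   the product of two nonzero values can still be zero.  For example,
   in an 8-bit type:

     signed char x = 16, y = 16;
     signed char p = (signed char) (x * y);   => 256 wraps to 0

   so "nonzero * nonzero" is only known to be nonzero when signed
   overflow is undefined, which is why *STRICT_OVERFLOW_P is set.  */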
15845 /* Return true when the single expression T (a constant, an address
15846 or a COND_EXPR) is known to be nonzero.
15847 Similar logic is present in nonzero_address_p in rtlanal.c.
15849 If the return value is based on the assumption that signed overflow
15850 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15851 change *STRICT_OVERFLOW_P. */
15853 bool
15854 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15856 bool sub_strict_overflow_p;
15857 switch (TREE_CODE (t))
15859 case INTEGER_CST:
15860 return !integer_zerop (t);
15862 case ADDR_EXPR:
15864 tree base = TREE_OPERAND (t, 0);
15865 if (!DECL_P (base))
15866 base = get_base_address (base);
15868 if (!base)
15869 return false;
15871 /* Weak declarations may link to NULL. Other things may also be NULL,
15872 so protect with -fdelete-null-pointer-checks; variables allocated
15873 on the stack never have a NULL address and need no such guard. */
15874 if (DECL_P (base)
15875 && (flag_delete_null_pointer_checks
15876 || (DECL_CONTEXT (base)
15877 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15878 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15879 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15881 /* Constants are never weak. */
15882 if (CONSTANT_CLASS_P (base))
15883 return true;
15885 return false;
15888 case COND_EXPR:
15889 sub_strict_overflow_p = false;
15890 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15891 &sub_strict_overflow_p)
15892 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15893 &sub_strict_overflow_p))
15895 if (sub_strict_overflow_p)
15896 *strict_overflow_p = true;
15897 return true;
15899 break;
15901 default:
15902 break;
15904 return false;
15907 /* Return true when T is known to be nonzero. Floating-point types
15908 are not handled (that would additionally require ruling out denormals).
15909 Similar logic is present in nonzero_address_p in rtlanal.c.
15911 If the return value is based on the assumption that signed overflow
15912 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15913 change *STRICT_OVERFLOW_P. */
15915 bool
15916 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15918 tree type = TREE_TYPE (t);
15919 enum tree_code code;
15921 /* Doing something useful for floating point would need more work. */
15922 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15923 return false;
15925 code = TREE_CODE (t);
15926 switch (TREE_CODE_CLASS (code))
15928 case tcc_unary:
15929 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15930 strict_overflow_p);
15931 case tcc_binary:
15932 case tcc_comparison:
15933 return tree_binary_nonzero_warnv_p (code, type,
15934 TREE_OPERAND (t, 0),
15935 TREE_OPERAND (t, 1),
15936 strict_overflow_p);
15937 case tcc_constant:
15938 case tcc_declaration:
15939 case tcc_reference:
15940 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15942 default:
15943 break;
15946 switch (code)
15948 case TRUTH_NOT_EXPR:
15949 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15950 strict_overflow_p);
15952 case TRUTH_AND_EXPR:
15953 case TRUTH_OR_EXPR:
15954 case TRUTH_XOR_EXPR:
15955 return tree_binary_nonzero_warnv_p (code, type,
15956 TREE_OPERAND (t, 0),
15957 TREE_OPERAND (t, 1),
15958 strict_overflow_p);
15960 case COND_EXPR:
15961 case CONSTRUCTOR:
15962 case OBJ_TYPE_REF:
15963 case ASSERT_EXPR:
15964 case ADDR_EXPR:
15965 case WITH_SIZE_EXPR:
15966 case SSA_NAME:
15967 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15969 case COMPOUND_EXPR:
15970 case MODIFY_EXPR:
15971 case BIND_EXPR:
15972 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15973 strict_overflow_p);
15975 case SAVE_EXPR:
15976 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15977 strict_overflow_p);
15979 case CALL_EXPR:
15980 return alloca_call_p (t);
15982 default:
15983 break;
15985 return false;
15988 /* Return true when T is known to be nonzero. Handle warnings
15989 about undefined signed overflow. */
15991 bool
15992 tree_expr_nonzero_p (tree t)
15994 bool ret, strict_overflow_p;
15996 strict_overflow_p = false;
15997 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15998 if (strict_overflow_p)
15999 fold_overflow_warning (("assuming signed overflow does not occur when "
16000 "determining that expression is always "
16001 "non-zero"),
16002 WARN_STRICT_OVERFLOW_MISC);
16003 return ret;
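/* Illustrative example (editorial sketch): for an automatic, non-weak
   variable

     int x;

   tree_expr_nonzero_p on the ADDR_EXPR &x returns true via
   tree_single_nonzero_warnv_p, even without
   -fdelete-null-pointer-checks, because auto_var_in_fn_p holds for
   stack variables.  */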
16006 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16007 attempt to fold the expression to a constant without modifying TYPE,
16008 OP0 or OP1.
16010 If the expression could be simplified to a constant, then return
16011 the constant. If the expression would not be simplified to a
16012 constant, then return NULL_TREE. */
16014 tree
16015 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16017 tree tem = fold_binary (code, type, op0, op1);
16018 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16021 /* Given the components of a unary expression CODE, TYPE and OP0,
16022 attempt to fold the expression to a constant without modifying
16023 TYPE or OP0.
16025 If the expression could be simplified to a constant, then return
16026 the constant. If the expression would not be simplified to a
16027 constant, then return NULL_TREE. */
16029 tree
16030 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16032 tree tem = fold_unary (code, type, op0);
16033 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
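/* Illustrative use (editorial sketch) of the two helpers above:

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                         two, three);   => INTEGER_CST 5
     tree neg = fold_unary_to_constant (NEGATE_EXPR, integer_type_node,
                                        two);           => INTEGER_CST -2

   When the result does not simplify to a constant, both return
   NULL_TREE.  */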
16036 /* If EXP represents referencing an element in a constant string
16037 (either via pointer arithmetic or array indexing), return the
16038 tree representing the value accessed, otherwise return NULL. */
16040 tree
16041 fold_read_from_constant_string (tree exp)
16043 if ((TREE_CODE (exp) == INDIRECT_REF
16044 || TREE_CODE (exp) == ARRAY_REF)
16045 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16047 tree exp1 = TREE_OPERAND (exp, 0);
16048 tree index;
16049 tree string;
16050 location_t loc = EXPR_LOCATION (exp);
16052 if (TREE_CODE (exp) == INDIRECT_REF)
16053 string = string_constant (exp1, &index);
16054 else
16056 tree low_bound = array_ref_low_bound (exp);
16057 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16059 /* Optimize the special case of a zero lower bound.
16061 We convert the low_bound to sizetype to avoid some problems
16062 with constant folding. (E.g. suppose the lower bound is 1,
16063 and its mode is QI. Without the conversion, (ARRAY
16064 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16065 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16066 if (! integer_zerop (low_bound))
16067 index = size_diffop_loc (loc, index,
16068 fold_convert_loc (loc, sizetype, low_bound));
16070 string = exp1;
16073 if (string
16074 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16075 && TREE_CODE (string) == STRING_CST
16076 && TREE_CODE (index) == INTEGER_CST
16077 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16078 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16079 == MODE_INT)
16080 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16081 return build_int_cst_type (TREE_TYPE (exp),
16082 (TREE_STRING_POINTER (string)
16083 [TREE_INT_CST_LOW (index)]));
16085 return NULL;
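/* Illustrative example (editorial sketch): both access forms of a
   string literal fold to the character value:

     "abc"[1]          ARRAY_REF branch     => INTEGER_CST 'b'
     *("abc" + 2)      INDIRECT_REF branch  => INTEGER_CST 'c'

   A non-constant index, or one at or beyond TREE_STRING_LENGTH,
   yields NULL.  */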
16088 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16089 an integer, real, or fixed-point constant.
16091 TYPE is the type of the result. */
16093 static tree
16094 fold_negate_const (tree arg0, tree type)
16096 tree t = NULL_TREE;
16098 switch (TREE_CODE (arg0))
16100 case INTEGER_CST:
16102 double_int val = tree_to_double_int (arg0);
16103 bool overflow;
16104 val = val.neg_with_overflow (&overflow);
16105 t = force_fit_type_double (type, val, 1,
16106 (overflow | TREE_OVERFLOW (arg0))
16107 && !TYPE_UNSIGNED (type));
16108 break;
16111 case REAL_CST:
16112 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16113 break;
16115 case FIXED_CST:
16117 FIXED_VALUE_TYPE f;
16118 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16119 &(TREE_FIXED_CST (arg0)), NULL,
16120 TYPE_SATURATING (type));
16121 t = build_fixed (type, f);
16122 /* Propagate overflow flags. */
16123 if (overflow_p | TREE_OVERFLOW (arg0))
16124 TREE_OVERFLOW (t) = 1;
16125 break;
16128 default:
16129 gcc_unreachable ();
16132 return t;
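/* Editorial example of the INTEGER_CST overflow handling above: the
   most negative value of a signed type has no representable negation,
   so the result wraps and is flagged:

     tree m = TYPE_MIN_VALUE (integer_type_node);
     tree n = fold_negate_const (m, integer_type_node);
     => n is again INT_MIN and TREE_OVERFLOW (n) is set
*/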
16135 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16136 an integer or a real constant.
16138 TYPE is the type of the result. */
16140 tree
16141 fold_abs_const (tree arg0, tree type)
16143 tree t = NULL_TREE;
16145 switch (TREE_CODE (arg0))
16147 case INTEGER_CST:
16149 double_int val = tree_to_double_int (arg0);
16151 /* If the value is unsigned or non-negative, then the absolute value
16152 is the same as the ordinary value. */
16153 if (TYPE_UNSIGNED (type)
16154 || !val.is_negative ())
16155 t = arg0;
16157 /* If the value is negative, then the absolute value is
16158 its negation. */
16159 else
16161 bool overflow;
16162 val = val.neg_with_overflow (&overflow);
16163 t = force_fit_type_double (type, val, -1,
16164 overflow | TREE_OVERFLOW (arg0));
16167 break;
16169 case REAL_CST:
16170 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16171 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16172 else
16173 t = arg0;
16174 break;
16176 default:
16177 gcc_unreachable ();
16180 return t;
16183 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16184 constant. TYPE is the type of the result. */
16186 static tree
16187 fold_not_const (const_tree arg0, tree type)
16189 double_int val;
16191 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16193 val = ~tree_to_double_int (arg0);
16194 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16197 /* Given CODE, a relational operator, the target type, TYPE and two
16198 constant operands OP0 and OP1, return the result of the
16199 relational operation. If the result is not a compile time
16200 constant, then return NULL_TREE. */
16202 static tree
16203 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16205 int result, invert;
16207 /* From here on, the only cases we handle are when the result is
16208 known to be a constant. */
16210 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16212 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16213 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16215 /* Handle the cases where either operand is a NaN. */
16216 if (real_isnan (c0) || real_isnan (c1))
16218 switch (code)
16220 case EQ_EXPR:
16221 case ORDERED_EXPR:
16222 result = 0;
16223 break;
16225 case NE_EXPR:
16226 case UNORDERED_EXPR:
16227 case UNLT_EXPR:
16228 case UNLE_EXPR:
16229 case UNGT_EXPR:
16230 case UNGE_EXPR:
16231 case UNEQ_EXPR:
16232 result = 1;
16233 break;
16235 case LT_EXPR:
16236 case LE_EXPR:
16237 case GT_EXPR:
16238 case GE_EXPR:
16239 case LTGT_EXPR:
16240 if (flag_trapping_math)
16241 return NULL_TREE;
16242 result = 0;
16243 break;
16245 default:
16246 gcc_unreachable ();
16249 return constant_boolean_node (result, type);
16252 return constant_boolean_node (real_compare (code, c0, c1), type);
16255 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16257 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16258 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16259 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16262 /* Handle equality/inequality of complex constants. */
16263 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16265 tree rcond = fold_relational_const (code, type,
16266 TREE_REALPART (op0),
16267 TREE_REALPART (op1));
16268 tree icond = fold_relational_const (code, type,
16269 TREE_IMAGPART (op0),
16270 TREE_IMAGPART (op1));
16271 if (code == EQ_EXPR)
16272 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16273 else if (code == NE_EXPR)
16274 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16275 else
16276 return NULL_TREE;
16279 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16281 unsigned count = VECTOR_CST_NELTS (op0);
16282 tree *elts = XALLOCAVEC (tree, count);
16283 gcc_assert (VECTOR_CST_NELTS (op1) == count
16284 && TYPE_VECTOR_SUBPARTS (type) == count);
16286 for (unsigned i = 0; i < count; i++)
16288 tree elem_type = TREE_TYPE (type);
16289 tree elem0 = VECTOR_CST_ELT (op0, i);
16290 tree elem1 = VECTOR_CST_ELT (op1, i);
16292 tree tem = fold_relational_const (code, elem_type,
16293 elem0, elem1);
16295 if (tem == NULL_TREE)
16296 return NULL_TREE;
16298 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16301 return build_vector (type, elts);
16304 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16306 To compute GT, swap the arguments and do LT.
16307 To compute GE, do LT and invert the result.
16308 To compute LE, swap the arguments, do LT and invert the result.
16309 To compute NE, do EQ and invert the result.
16311 Therefore, the code below must handle only EQ and LT. */
16313 if (code == LE_EXPR || code == GT_EXPR)
16315 tree tem = op0;
16316 op0 = op1;
16317 op1 = tem;
16318 code = swap_tree_comparison (code);
16321 /* Note that it is safe to invert for real values here because we
16322 have already handled the one case where it matters. */
16324 invert = 0;
16325 if (code == NE_EXPR || code == GE_EXPR)
16327 invert = 1;
16328 code = invert_tree_comparison (code, false);
16331 /* Compute a result for LT or EQ if the arguments permit;
16332 otherwise return NULL_TREE. */
16333 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16335 if (code == EQ_EXPR)
16336 result = tree_int_cst_equal (op0, op1);
16337 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16338 result = INT_CST_LT_UNSIGNED (op0, op1);
16339 else
16340 result = INT_CST_LT (op0, op1);
16342 else
16343 return NULL_TREE;
16345 if (invert)
16346 result ^= 1;
16347 return constant_boolean_node (result, type);
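/* Editorial sketch of the canonicalization above, in plain C terms:

     a >  b   ==    b <  a         swap, then LT
     a <= b   ==   !(b <  a)       swap LT, then invert
     a >= b   ==   !(a <  b)       invert LT
     a != b   ==   !(a == b)       invert EQ

   so after the swap and invert steps only EQ and LT are evaluated
   directly on the INTEGER_CST operands.  */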
16350 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16351 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16352 itself. */
16354 tree
16355 fold_build_cleanup_point_expr (tree type, tree expr)
16357 /* If the expression does not have side effects then we don't have to wrap
16358 it with a cleanup point expression. */
16359 if (!TREE_SIDE_EFFECTS (expr))
16360 return expr;
16362 /* If the expression is a RETURN_EXPR, check whether its operand, or
16363 the right-hand side of a MODIFY_EXPR operand, has side effects. If
16364 either is free of side effects, we don't need to wrap the expression
16365 in a cleanup point expression. Note we don't check the left-hand side
16366 of the MODIFY_EXPR because it should always be the return decl. */
16367 if (TREE_CODE (expr) == RETURN_EXPR)
16369 tree op = TREE_OPERAND (expr, 0);
16370 if (!op || !TREE_SIDE_EFFECTS (op))
16371 return expr;
16372 op = TREE_OPERAND (op, 1);
16373 if (!TREE_SIDE_EFFECTS (op))
16374 return expr;
16377 return build1 (CLEANUP_POINT_EXPR, type, expr);
16380 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16381 of an indirection through OP0, or NULL_TREE if no simplification is
16382 possible. */
16384 tree
16385 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16387 tree sub = op0;
16388 tree subtype;
16390 STRIP_NOPS (sub);
16391 subtype = TREE_TYPE (sub);
16392 if (!POINTER_TYPE_P (subtype))
16393 return NULL_TREE;
16395 if (TREE_CODE (sub) == ADDR_EXPR)
16397 tree op = TREE_OPERAND (sub, 0);
16398 tree optype = TREE_TYPE (op);
16399 /* *&CONST_DECL -> to the value of the const decl. */
16400 if (TREE_CODE (op) == CONST_DECL)
16401 return DECL_INITIAL (op);
16402 /* *&p => p; make sure to handle *&"str"[cst] here. */
16403 if (type == optype)
16405 tree fop = fold_read_from_constant_string (op);
16406 if (fop)
16407 return fop;
16408 else
16409 return op;
16411 /* *(foo *)&fooarray => fooarray[0] */
16412 else if (TREE_CODE (optype) == ARRAY_TYPE
16413 && type == TREE_TYPE (optype)
16414 && (!in_gimple_form
16415 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16417 tree type_domain = TYPE_DOMAIN (optype);
16418 tree min_val = size_zero_node;
16419 if (type_domain && TYPE_MIN_VALUE (type_domain))
16420 min_val = TYPE_MIN_VALUE (type_domain);
16421 if (in_gimple_form
16422 && TREE_CODE (min_val) != INTEGER_CST)
16423 return NULL_TREE;
16424 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16425 NULL_TREE, NULL_TREE);
16427 /* *(foo *)&complexfoo => __real__ complexfoo */
16428 else if (TREE_CODE (optype) == COMPLEX_TYPE
16429 && type == TREE_TYPE (optype))
16430 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16431 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16432 else if (TREE_CODE (optype) == VECTOR_TYPE
16433 && type == TREE_TYPE (optype))
16435 tree part_width = TYPE_SIZE (type);
16436 tree index = bitsize_int (0);
16437 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16441 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16442 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16444 tree op00 = TREE_OPERAND (sub, 0);
16445 tree op01 = TREE_OPERAND (sub, 1);
16447 STRIP_NOPS (op00);
16448 if (TREE_CODE (op00) == ADDR_EXPR)
16450 tree op00type;
16451 op00 = TREE_OPERAND (op00, 0);
16452 op00type = TREE_TYPE (op00);
16454 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16455 if (TREE_CODE (op00type) == VECTOR_TYPE
16456 && type == TREE_TYPE (op00type))
16458 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16459 tree part_width = TYPE_SIZE (type);
16460 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16461 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16462 tree index = bitsize_int (indexi);
16464 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16465 return fold_build3_loc (loc,
16466 BIT_FIELD_REF, type, op00,
16467 part_width, index);
16470 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16471 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16472 && type == TREE_TYPE (op00type))
16474 tree size = TYPE_SIZE_UNIT (type);
16475 if (tree_int_cst_equal (size, op01))
16476 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16478 /* ((foo *)&fooarray)[1] => fooarray[1] */
16479 else if (TREE_CODE (op00type) == ARRAY_TYPE
16480 && type == TREE_TYPE (op00type))
16482 tree type_domain = TYPE_DOMAIN (op00type);
16483 tree min_val = size_zero_node;
16484 if (type_domain && TYPE_MIN_VALUE (type_domain))
16485 min_val = TYPE_MIN_VALUE (type_domain);
16486 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16487 TYPE_SIZE_UNIT (type));
16488 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16489 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16490 NULL_TREE, NULL_TREE);
16495 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16496 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16497 && type == TREE_TYPE (TREE_TYPE (subtype))
16498 && (!in_gimple_form
16499 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16501 tree type_domain;
16502 tree min_val = size_zero_node;
16503 sub = build_fold_indirect_ref_loc (loc, sub);
16504 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16505 if (type_domain && TYPE_MIN_VALUE (type_domain))
16506 min_val = TYPE_MIN_VALUE (type_domain);
16507 if (in_gimple_form
16508 && TREE_CODE (min_val) != INTEGER_CST)
16509 return NULL_TREE;
16510 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16511 NULL_TREE);
16514 return NULL_TREE;
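/* Editorial summary of the simplifications above, as source-level
   examples (cf is a _Complex float, a an int array, v4f a 16-byte
   vector of float, p an int (*)[4]):

     *(float *) &cf           => __real__ cf
     *((float *) &cf + 1)     => __imag__ cf
     *(int *) &a              => a[0]
     *((int *) &a + 2)        => a[2]
     *(float *) &v4f          => BIT_FIELD_REF <v4f, 32, 0>
     *(int *) p               => (*p)[0]
*/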
16517 /* Builds an expression for an indirection through T, simplifying some
16518 cases. */
16520 tree
16521 build_fold_indirect_ref_loc (location_t loc, tree t)
16523 tree type = TREE_TYPE (TREE_TYPE (t));
16524 tree sub = fold_indirect_ref_1 (loc, type, t);
16526 if (sub)
16527 return sub;
16529 return build1_loc (loc, INDIRECT_REF, type, t);
16532 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16534 tree
16535 fold_indirect_ref_loc (location_t loc, tree t)
16537 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16539 if (sub)
16540 return sub;
16541 else
16542 return t;
16545 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16546 whose result is ignored. The type of the returned tree need not be
16547 the same as the original expression. */
16549 tree
16550 fold_ignored_result (tree t)
16552 if (!TREE_SIDE_EFFECTS (t))
16553 return integer_zero_node;
16555 for (;;)
16556 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16558 case tcc_unary:
16559 t = TREE_OPERAND (t, 0);
16560 break;
16562 case tcc_binary:
16563 case tcc_comparison:
16564 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16565 t = TREE_OPERAND (t, 0);
16566 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16567 t = TREE_OPERAND (t, 1);
16568 else
16569 return t;
16570 break;
16572 case tcc_expression:
16573 switch (TREE_CODE (t))
16575 case COMPOUND_EXPR:
16576 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16577 return t;
16578 t = TREE_OPERAND (t, 0);
16579 break;
16581 case COND_EXPR:
16582 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16583 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16584 return t;
16585 t = TREE_OPERAND (t, 0);
16586 break;
16588 default:
16589 return t;
16591 break;
16593 default:
16594 return t;
16598 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16599 This can only be applied to objects of a sizetype. */
16601 tree
16602 round_up_loc (location_t loc, tree value, int divisor)
16604 tree div = NULL_TREE;
16606 gcc_assert (divisor > 0);
16607 if (divisor == 1)
16608 return value;
16610 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16611 have to do anything. Only perform this check when VALUE is not a
16612 constant, because for a constant the check is more expensive than
16613 simply doing the rounding. */
16614 if (TREE_CODE (value) != INTEGER_CST)
16616 div = build_int_cst (TREE_TYPE (value), divisor);
16618 if (multiple_of_p (TREE_TYPE (value), value, div))
16619 return value;
16622 /* If divisor is a power of two, simplify this to bit manipulation. */
16623 if (divisor == (divisor & -divisor))
16625 if (TREE_CODE (value) == INTEGER_CST)
16627 double_int val = tree_to_double_int (value);
16628 bool overflow_p;
16630 if ((val.low & (divisor - 1)) == 0)
16631 return value;
16633 overflow_p = TREE_OVERFLOW (value);
16634 val.low &= ~(divisor - 1);
16635 val.low += divisor;
16636 if (val.low == 0)
16638 val.high++;
16639 if (val.high == 0)
16640 overflow_p = true;
16643 return force_fit_type_double (TREE_TYPE (value), val,
16644 -1, overflow_p);
16646 else
16648 tree t;
16650 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16651 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16652 t = build_int_cst (TREE_TYPE (value), -divisor);
16653 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16656 else
16658 if (!div)
16659 div = build_int_cst (TREE_TYPE (value), divisor);
16660 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16661 value = size_binop_loc (loc, MULT_EXPR, value, div);
16664 return value;
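/* Editorial sketch of the power-of-two fast path above: for
   DIVISOR == 2^k, rounding up reduces to bit manipulation,

     unsigned v = 13, d = 8;
     unsigned up = (v + d - 1) & ~(d - 1);   => 16

   which is exactly what the non-constant branch builds from PLUS_EXPR
   and BIT_AND_EXPR; the INTEGER_CST branch performs the equivalent
   mask-and-add directly on the double_int halves, carrying into
   val.high.  */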
16667 /* Likewise, but round down. */
16669 tree
16670 round_down_loc (location_t loc, tree value, int divisor)
16672 tree div = NULL_TREE;
16674 gcc_assert (divisor > 0);
16675 if (divisor == 1)
16676 return value;
16678 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16679 have to do anything. Only perform this check when VALUE is not a
16680 constant, because for a constant the check is more expensive than
16681 simply doing the rounding. */
16682 if (TREE_CODE (value) != INTEGER_CST)
16684 div = build_int_cst (TREE_TYPE (value), divisor);
16686 if (multiple_of_p (TREE_TYPE (value), value, div))
16687 return value;
16690 /* If divisor is a power of two, simplify this to bit manipulation. */
16691 if (divisor == (divisor & -divisor))
16693 tree t;
16695 t = build_int_cst (TREE_TYPE (value), -divisor);
16696 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16698 else
16700 if (!div)
16701 div = build_int_cst (TREE_TYPE (value), divisor);
16702 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16703 value = size_binop_loc (loc, MULT_EXPR, value, div);
16706 return value;
16709 /* Returns a pointer to the base of the object addressed by EXP and
16710 extracts the information about the offset of the access, storing it
16711 in *PBITPOS and *POFFSET. */
16713 static tree
16714 split_address_to_core_and_offset (tree exp,
16715 HOST_WIDE_INT *pbitpos, tree *poffset)
16717 tree core;
16718 enum machine_mode mode;
16719 int unsignedp, volatilep;
16720 HOST_WIDE_INT bitsize;
16721 location_t loc = EXPR_LOCATION (exp);
16723 if (TREE_CODE (exp) == ADDR_EXPR)
16725 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16726 poffset, &mode, &unsignedp, &volatilep,
16727 false);
16728 core = build_fold_addr_expr_loc (loc, core);
16730 else
16732 core = exp;
16733 *pbitpos = 0;
16734 *poffset = NULL_TREE;
16737 return core;
16740 /* Returns true if the addresses of E1 and E2 differ by a constant,
16741 false otherwise. If they do, E1 - E2 is stored in *DIFF. */
16743 bool
16744 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16746 tree core1, core2;
16747 HOST_WIDE_INT bitpos1, bitpos2;
16748 tree toffset1, toffset2, tdiff, type;
16750 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16751 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16753 if (bitpos1 % BITS_PER_UNIT != 0
16754 || bitpos2 % BITS_PER_UNIT != 0
16755 || !operand_equal_p (core1, core2, 0))
16756 return false;
16758 if (toffset1 && toffset2)
16760 type = TREE_TYPE (toffset1);
16761 if (type != TREE_TYPE (toffset2))
16762 toffset2 = fold_convert (type, toffset2);
16764 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16765 if (!cst_and_fits_in_hwi (tdiff))
16766 return false;
16768 *diff = int_cst_value (tdiff);
16770 else if (toffset1 || toffset2)
16772 /* If only one of the offsets is non-constant, the difference cannot
16773 be a constant. */
16774 return false;
16776 else
16777 *diff = 0;
16779 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16780 return true;
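/* Illustrative example (editorial sketch, assuming 32-bit int):

     int a[10];
     HOST_WIDE_INT diff;
     ptr_difference_const (&a[7], &a[2], &diff)

   splits both addresses to the common core &a with bit positions 224
   and 64, so it returns true and sets diff to
   (224 - 64) / BITS_PER_UNIT == 20 bytes.  */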
16783 /* Simplify the floating point expression EXP when the sign of the
16784 result is not significant. Return NULL_TREE if no simplification
16785 is possible. */
16787 tree
16788 fold_strip_sign_ops (tree exp)
16790 tree arg0, arg1;
16791 location_t loc = EXPR_LOCATION (exp);
16793 switch (TREE_CODE (exp))
16795 case ABS_EXPR:
16796 case NEGATE_EXPR:
16797 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16798 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16800 case MULT_EXPR:
16801 case RDIV_EXPR:
16802 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16803 return NULL_TREE;
16804 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16805 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16806 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16807 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16808 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16809 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16810 break;
16812 case COMPOUND_EXPR:
16813 arg0 = TREE_OPERAND (exp, 0);
16814 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16815 if (arg1)
16816 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16817 break;
16819 case COND_EXPR:
16820 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16821 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16822 if (arg0 || arg1)
16823 return fold_build3_loc (loc,
16824 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16825 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16826 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16827 break;
16829 case CALL_EXPR:
16831 const enum built_in_function fcode = builtin_mathfn_code (exp);
16832 switch (fcode)
16834 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16835 /* Strip the copysign call, returning its first argument while preserving the second's side effects. */
16836 arg0 = CALL_EXPR_ARG (exp, 0);
16837 arg1 = CALL_EXPR_ARG (exp, 1);
16838 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16840 default:
16841 /* Strip sign ops from the argument of "odd" math functions. */
16842 if (negate_mathfn_p (fcode))
16844 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16845 if (arg0)
16846 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16848 break;
16851 break;
16853 default:
16854 break;
16856 return NULL_TREE;
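/* Illustrative examples (editorial sketch): when only the magnitude of
   the result matters, sign-changing operations are peeled off:

     fold_strip_sign_ops (-x * y)            => x * y
     fold_strip_sign_ops (copysign (x, y))   => x, with y's side effects
                                                kept by omit_one_operand_loc

   Callers such as the math-builtin folders in builtins.c use this when
   the value feeds a sign-insensitive computation like x * x or fabs.  */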