/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
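
/* For illustration: the encoding uses one bit each for "less than" (1),
   "equal" (2), "greater than" (4) and "unordered" (8), so the logical
   AND or OR of two comparisons is just the bitwise AND or OR of their
   codes.  For example:

     COMPCODE_LE   == COMPCODE_LT | COMPCODE_EQ              (3 == 1 | 2)
     COMPCODE_UNGE == COMPCODE_UNORD | COMPCODE_GT | COMPCODE_EQ
     COMPCODE_NE   == COMPCODE_TRUE & ~COMPCODE_EQ           (13 == 15 & ~2)

   so "a < b || a == b" combines to COMPCODE_LE with no special
   casing.  */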
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
					  uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
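
/* For illustration (a usage sketch, not code appearing elsewhere in
   this file): dividing the constants 12 and 4 with TRUNC_DIV_EXPR
   yields a tree for 3, whereas 13 and 4 yield NULL_TREE because the
   remainder is nonzero:

     tree four = build_int_cst (integer_type_node, 4);
     div_if_zero_remainder (TRUNC_DIV_EXPR,
			    build_int_cst (integer_type_node, 12), four)
       ==> an INTEGER_CST with value 3
     div_if_zero_remainder (TRUNC_DIV_EXPR,
			    build_int_cst (integer_type_node, 13), four)
       ==> NULL_TREE  */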

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
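
/* For illustration, the deferral protocol looks like this from a
   caller's point of view (a sketch; the variable names are made up):

     fold_defer_overflow_warnings ();
     folded = fold (expr);
     ... decide whether FOLDED will actually be used ...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   Any fold_overflow_warning raised while folding is stashed rather
   than issued, and is emitted by the undefer call only when USED_P is
   true.  */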

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
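
/* For illustration: sin is odd (sin(-x) == -sin(x)), so a negation can
   be pushed into the call; cos is even (cos(-x) == cos(x)) and is
   deliberately absent from the list above.  The rint family is only
   odd under a symmetric rounding mode, hence the !flag_rounding_math
   guard: under FE_UPWARD, rint(-0.5) is -0.0 but -rint(0.5) is
   -1.0.  */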

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
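
/* For illustration: in a signed 8-bit type the only value that cannot
   be negated is -128 (bit pattern 0x80), since +128 is not
   representable; the final comparison above rejects exactly that
   pattern, the sign bit alone.  */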

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
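
/* For illustration: negate_expr_p ("a - b") is typically true for
   integer operands, since fold_negate_expr below rewrites it as
   "b - a" with no new operations, and negate_expr_p ("x * -3.0") is
   true because the negative constant can absorb the sign, yielding
   the canonical "x * 3.0".  */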

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
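
/* For illustration of the RSHIFT_EXPR case above: when x is a 32-bit
   int, "(int)x >> 31" is 0 for non-negative x and -1 for negative x,
   so its negation is 0 or 1; "(unsigned)x >> 31" computes exactly
   that without a separate negation instruction.  */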

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
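
/* For illustration: splitting "x + 5" with CODE == PLUS_EXPR yields
   var == x and *litp == 5; splitting "x - 5" yields var == x and
   *minus_litp == 5 (the subtracted literal); an operand that has
   TREE_CONSTANT set but is not a literal, such as the address of a
   static object, lands in *conp instead.  associate_trees below is
   the inverse operation.  */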

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
		   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      /* ??? Need quad precision, or an additional shift operand
	 to the multiply primitive, to handle very large highparts.  */
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
	return NULL_TREE;
      /* The high part is the double-width product shifted right by
	 the precision.  */
      tmp = op1 * op2;
      res = tmp.rshift (TYPE_PRECISION (type), TYPE_PRECISION (type), !uns);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    op1.low += op2.low - 1;

	  res.low = op1.low / op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;
      if (op2.is_one ())
	{
	  res = op1;
	  break;
	}
      if (op1 == op2 && !op1.is_zero ())
	{
	  res = double_int_one;
	  break;
	}
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    op1.low += op2.low - 1;
	  res.low = op1.low % op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
			     (!uns && overflow)
			     | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
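
/* For illustration of MULT_HIGHPART_EXPR: with a 16-bit unsigned type,
   0x1234 * 0x5678 == 0x06260060, so the high part is 0x0626; the code
   above computes the full product in a double_int and shifts it right
   by TYPE_PRECISION (16) to extract that value.  */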

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ...  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
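
/* For illustration of the complex MULT_EXPR formula above:
   (1 + 2i) * (3 + 4i) has real part 1*3 - 2*4 == -5 and imaginary
   part 1*4 + 2*3 == 10, i.e. -5 + 10i.  The "wide" division variant
   (flag_complex_method != 0) is Smith's algorithm: it divides through
   by the larger-magnitude component of the divisor so the
   intermediate products stay in range.  */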

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
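
/* For illustration (a sketch): computing the byte size of a 10-element
   array of 4-byte elements at compile time might look like

     tree bytes = size_binop (MULT_EXPR, size_int (10), size_int (4));

   which yields an INTEGER_CST of 40 with type sizetype; size_binop
   and size_int are the conventional wrappers around size_binop_loc
   and size_int_kind.  */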

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
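
/* For illustration: for the sizetype constants 4 and 8, the code above
   computes 8 - 4 == 4 in the unsigned type (which cannot overflow),
   converts it to ssizetype, and subtracts it from zero, returning -4
   rather than the huge wrapped-around unsigned difference.  */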

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  val = tree_to_double_int (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      val = tree_to_double_int (ut);
	    }
	}
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
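
/* For illustration: under these rules, folding (int) 1.0e30 yields
   INT_MAX (2147483647 for a 32-bit int) with TREE_OVERFLOW set on the
   result, and (int) __builtin_nan ("") yields 0, likewise marked as
   overflowed.  */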

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     check whether the fractional bits are nonzero and, if so, add 1
     to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
			     (temp.is_negative ()
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
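
/* For illustration: converting the signed fixed-point value -1.5 to an
   integer must yield -1 (rounding toward zero).  The arithmetic right
   shift above alone would give -2, so the code detects the discarded
   nonzero fraction via the shift-back comparison and adds 1.  */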

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
1818 /* Convert expression ARG to type TYPE. Used by the middle-end for
1819 simple conversions in preference to calling the front-end's convert. */
1821 tree
1822 fold_convert_loc (location_t loc, tree type, tree arg)
1824 tree orig = TREE_TYPE (arg);
1825 tree tem;
1827 if (type == orig)
1828 return arg;
1830 if (TREE_CODE (arg) == ERROR_MARK
1831 || TREE_CODE (type) == ERROR_MARK
1832 || TREE_CODE (orig) == ERROR_MARK)
1833 return error_mark_node;
1835 switch (TREE_CODE (type))
1837 case POINTER_TYPE:
1838 case REFERENCE_TYPE:
1839 /* Handle conversions between pointers to different address spaces. */
1840 if (POINTER_TYPE_P (orig)
1841 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1842 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1843 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1844 /* fall through */
1846 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1847 case OFFSET_TYPE:
1848 if (TREE_CODE (arg) == INTEGER_CST)
1850 tem = fold_convert_const (NOP_EXPR, type, arg);
1851 if (tem != NULL_TREE)
1852 return tem;
1854 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1855 || TREE_CODE (orig) == OFFSET_TYPE)
1856 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1857 if (TREE_CODE (orig) == COMPLEX_TYPE)
1858 return fold_convert_loc (loc, type,
1859 fold_build1_loc (loc, REALPART_EXPR,
1860 TREE_TYPE (orig), arg));
1861 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1862 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1863 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1865 case REAL_TYPE:
1866 if (TREE_CODE (arg) == INTEGER_CST)
1868 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1869 if (tem != NULL_TREE)
1870 return tem;
1872 else if (TREE_CODE (arg) == REAL_CST)
1874 tem = fold_convert_const (NOP_EXPR, type, arg);
1875 if (tem != NULL_TREE)
1876 return tem;
1878 else if (TREE_CODE (arg) == FIXED_CST)
1880 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1881 if (tem != NULL_TREE)
1882 return tem;
1885 switch (TREE_CODE (orig))
1887 case INTEGER_TYPE:
1888 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1889 case POINTER_TYPE: case REFERENCE_TYPE:
1890 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1892 case REAL_TYPE:
1893 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1895 case FIXED_POINT_TYPE:
1896 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1898 case COMPLEX_TYPE:
1899 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1900 return fold_convert_loc (loc, type, tem);
1902 default:
1903 gcc_unreachable ();
1906 case FIXED_POINT_TYPE:
1907 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1908 || TREE_CODE (arg) == REAL_CST)
1910 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1911 if (tem != NULL_TREE)
1912 goto fold_convert_exit;
1915 switch (TREE_CODE (orig))
1917 case FIXED_POINT_TYPE:
1918 case INTEGER_TYPE:
1919 case ENUMERAL_TYPE:
1920 case BOOLEAN_TYPE:
1921 case REAL_TYPE:
1922 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1924 case COMPLEX_TYPE:
1925 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1926 return fold_convert_loc (loc, type, tem);
1928 default:
1929 gcc_unreachable ();
1932 case COMPLEX_TYPE:
1933 switch (TREE_CODE (orig))
1935 case INTEGER_TYPE:
1936 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1937 case POINTER_TYPE: case REFERENCE_TYPE:
1938 case REAL_TYPE:
1939 case FIXED_POINT_TYPE:
1940 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1941 fold_convert_loc (loc, TREE_TYPE (type), arg),
1942 fold_convert_loc (loc, TREE_TYPE (type),
1943 integer_zero_node));
1944 case COMPLEX_TYPE:
1946 tree rpart, ipart;
1948 if (TREE_CODE (arg) == COMPLEX_EXPR)
1950 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1951 TREE_OPERAND (arg, 0));
1952 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1953 TREE_OPERAND (arg, 1));
1954 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1957 arg = save_expr (arg);
1958 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1959 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1960 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1961 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1962 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1965 default:
1966 gcc_unreachable ();
1969 case VECTOR_TYPE:
1970 if (integer_zerop (arg))
1971 return build_zero_vector (type);
1972 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1973 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1974 || TREE_CODE (orig) == VECTOR_TYPE);
1975 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
1977 case VOID_TYPE:
1978 tem = fold_ignored_result (arg);
1979 return fold_build1_loc (loc, NOP_EXPR, type, tem);
1981 default:
1982 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1983 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1984 gcc_unreachable ();
1986 fold_convert_exit:
1987 protected_set_expr_location_unshare (tem, loc);
1988 return tem;
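/* Illustrative example, not part of the original source: converting an
   integer constant to a floating type goes through fold_convert_const
   with FLOAT_EXPR, so a call such as

     fold_convert_loc (loc, double_type_node,
                       build_int_cst (integer_type_node, 2))

   folds directly to the REAL_CST 2.0 instead of building a cast.  */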
1991 /* Return false if expr can be assumed not to be an lvalue, true
1992 otherwise. */
1994 static bool
1995 maybe_lvalue_p (const_tree x)
1997 /* We only need to wrap lvalue tree codes. */
1998 switch (TREE_CODE (x))
2000 case VAR_DECL:
2001 case PARM_DECL:
2002 case RESULT_DECL:
2003 case LABEL_DECL:
2004 case FUNCTION_DECL:
2005 case SSA_NAME:
2007 case COMPONENT_REF:
2008 case MEM_REF:
2009 case INDIRECT_REF:
2010 case ARRAY_REF:
2011 case ARRAY_RANGE_REF:
2012 case BIT_FIELD_REF:
2013 case OBJ_TYPE_REF:
2015 case REALPART_EXPR:
2016 case IMAGPART_EXPR:
2017 case PREINCREMENT_EXPR:
2018 case PREDECREMENT_EXPR:
2019 case SAVE_EXPR:
2020 case TRY_CATCH_EXPR:
2021 case WITH_CLEANUP_EXPR:
2022 case COMPOUND_EXPR:
2023 case MODIFY_EXPR:
2024 case TARGET_EXPR:
2025 case COND_EXPR:
2026 case BIND_EXPR:
2027 break;
2029 default:
2030 /* Assume the worst for front-end tree codes. */
2031 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2032 break;
2033 return false;
2036 return true;
2039 /* Return an expr equal to X but certainly not valid as an lvalue. */
2041 tree
2042 non_lvalue_loc (location_t loc, tree x)
2044 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2045 us. */
2046 if (in_gimple_form)
2047 return x;
2049 if (! maybe_lvalue_p (x))
2050 return x;
2051 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2054 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2055 Zero means allow extended lvalues. */
2057 int pedantic_lvalues;
2059 /* When pedantic, return an expr equal to X but certainly not valid as a
2060 pedantic lvalue. Otherwise, return X. */
2062 static tree
2063 pedantic_non_lvalue_loc (location_t loc, tree x)
2065 if (pedantic_lvalues)
2066 return non_lvalue_loc (loc, x);
2068 return protected_set_expr_location_unshare (x, loc);
2071 /* Given a tree comparison code, return the code that is the logical inverse.
2072 It is generally not safe to do this for floating-point comparisons, except
2073 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2074 ERROR_MARK in this case. */
2076 enum tree_code
2077 invert_tree_comparison (enum tree_code code, bool honor_nans)
2079 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2080 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2081 return ERROR_MARK;
2083 switch (code)
2085 case EQ_EXPR:
2086 return NE_EXPR;
2087 case NE_EXPR:
2088 return EQ_EXPR;
2089 case GT_EXPR:
2090 return honor_nans ? UNLE_EXPR : LE_EXPR;
2091 case GE_EXPR:
2092 return honor_nans ? UNLT_EXPR : LT_EXPR;
2093 case LT_EXPR:
2094 return honor_nans ? UNGE_EXPR : GE_EXPR;
2095 case LE_EXPR:
2096 return honor_nans ? UNGT_EXPR : GT_EXPR;
2097 case LTGT_EXPR:
2098 return UNEQ_EXPR;
2099 case UNEQ_EXPR:
2100 return LTGT_EXPR;
2101 case UNGT_EXPR:
2102 return LE_EXPR;
2103 case UNGE_EXPR:
2104 return LT_EXPR;
2105 case UNLT_EXPR:
2106 return GE_EXPR;
2107 case UNLE_EXPR:
2108 return GT_EXPR;
2109 case ORDERED_EXPR:
2110 return UNORDERED_EXPR;
2111 case UNORDERED_EXPR:
2112 return ORDERED_EXPR;
2113 default:
2114 gcc_unreachable ();
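/* For example (illustrative): with NaNs honored and -ftrapping-math in
   effect, invert_tree_comparison (LT_EXPR, true) returns ERROR_MARK,
   since rewriting !(x < y) as the non-trapping UNGE_EXPR could drop a
   trap; with trapping math off it returns UNGE_EXPR, and for integral
   operands (HONOR_NANS false) it returns plain GE_EXPR.  */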
2118 /* Similar, but return the comparison that results if the operands are
2119 swapped. This is safe for floating-point. */
2121 enum tree_code
2122 swap_tree_comparison (enum tree_code code)
2124 switch (code)
2126 case EQ_EXPR:
2127 case NE_EXPR:
2128 case ORDERED_EXPR:
2129 case UNORDERED_EXPR:
2130 case LTGT_EXPR:
2131 case UNEQ_EXPR:
2132 return code;
2133 case GT_EXPR:
2134 return LT_EXPR;
2135 case GE_EXPR:
2136 return LE_EXPR;
2137 case LT_EXPR:
2138 return GT_EXPR;
2139 case LE_EXPR:
2140 return GE_EXPR;
2141 case UNGT_EXPR:
2142 return UNLT_EXPR;
2143 case UNGE_EXPR:
2144 return UNLE_EXPR;
2145 case UNLT_EXPR:
2146 return UNGT_EXPR;
2147 case UNLE_EXPR:
2148 return UNGE_EXPR;
2149 default:
2150 gcc_unreachable ();
2155 /* Convert a comparison tree code from an enum tree_code representation
2156 into a compcode bit-based encoding. This function is the inverse of
2157 compcode_to_comparison. */
2159 static enum comparison_code
2160 comparison_to_compcode (enum tree_code code)
2162 switch (code)
2164 case LT_EXPR:
2165 return COMPCODE_LT;
2166 case EQ_EXPR:
2167 return COMPCODE_EQ;
2168 case LE_EXPR:
2169 return COMPCODE_LE;
2170 case GT_EXPR:
2171 return COMPCODE_GT;
2172 case NE_EXPR:
2173 return COMPCODE_NE;
2174 case GE_EXPR:
2175 return COMPCODE_GE;
2176 case ORDERED_EXPR:
2177 return COMPCODE_ORD;
2178 case UNORDERED_EXPR:
2179 return COMPCODE_UNORD;
2180 case UNLT_EXPR:
2181 return COMPCODE_UNLT;
2182 case UNEQ_EXPR:
2183 return COMPCODE_UNEQ;
2184 case UNLE_EXPR:
2185 return COMPCODE_UNLE;
2186 case UNGT_EXPR:
2187 return COMPCODE_UNGT;
2188 case LTGT_EXPR:
2189 return COMPCODE_LTGT;
2190 case UNGE_EXPR:
2191 return COMPCODE_UNGE;
2192 default:
2193 gcc_unreachable ();
2197 /* Convert a compcode bit-based encoding of a comparison operator back
2198 to GCC's enum tree_code representation. This function is the
2199 inverse of comparison_to_compcode. */
2201 static enum tree_code
2202 compcode_to_comparison (enum comparison_code code)
2204 switch (code)
2206 case COMPCODE_LT:
2207 return LT_EXPR;
2208 case COMPCODE_EQ:
2209 return EQ_EXPR;
2210 case COMPCODE_LE:
2211 return LE_EXPR;
2212 case COMPCODE_GT:
2213 return GT_EXPR;
2214 case COMPCODE_NE:
2215 return NE_EXPR;
2216 case COMPCODE_GE:
2217 return GE_EXPR;
2218 case COMPCODE_ORD:
2219 return ORDERED_EXPR;
2220 case COMPCODE_UNORD:
2221 return UNORDERED_EXPR;
2222 case COMPCODE_UNLT:
2223 return UNLT_EXPR;
2224 case COMPCODE_UNEQ:
2225 return UNEQ_EXPR;
2226 case COMPCODE_UNLE:
2227 return UNLE_EXPR;
2228 case COMPCODE_UNGT:
2229 return UNGT_EXPR;
2230 case COMPCODE_LTGT:
2231 return LTGT_EXPR;
2232 case COMPCODE_UNGE:
2233 return UNGE_EXPR;
2234 default:
2235 gcc_unreachable ();
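/* Illustrative note on the encoding: each compcode is a 4-bit mask over
   the mutually exclusive outcomes LT, EQ, GT and UNORDERED, so ANDing
   or ORing two codes models ANDing or ORing the predicates themselves,
   e.g.

     COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE
     COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2 == COMPCODE_EQ

   which is exactly what combine_comparisons below relies on.  */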
2239 /* Return a tree for the comparison which is the combination of
2240 doing the AND or OR (depending on CODE) of the two operations LCODE
2241 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2242 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2243 if this makes the transformation invalid. */
2245 tree
2246 combine_comparisons (location_t loc,
2247 enum tree_code code, enum tree_code lcode,
2248 enum tree_code rcode, tree truth_type,
2249 tree ll_arg, tree lr_arg)
2251 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2252 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2253 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2254 int compcode;
2256 switch (code)
2258 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2259 compcode = lcompcode & rcompcode;
2260 break;
2262 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2263 compcode = lcompcode | rcompcode;
2264 break;
2266 default:
2267 return NULL_TREE;
2270 if (!honor_nans)
2272 /* Eliminate unordered comparisons, as well as LTGT and ORD
2273 which are not used unless the mode has NaNs. */
2274 compcode &= ~COMPCODE_UNORD;
2275 if (compcode == COMPCODE_LTGT)
2276 compcode = COMPCODE_NE;
2277 else if (compcode == COMPCODE_ORD)
2278 compcode = COMPCODE_TRUE;
2280 else if (flag_trapping_math)
2282 /* Check that the original operation and the optimized ones will trap
2283 under the same condition. */
2284 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2285 && (lcompcode != COMPCODE_EQ)
2286 && (lcompcode != COMPCODE_ORD);
2287 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2288 && (rcompcode != COMPCODE_EQ)
2289 && (rcompcode != COMPCODE_ORD);
2290 bool trap = (compcode & COMPCODE_UNORD) == 0
2291 && (compcode != COMPCODE_EQ)
2292 && (compcode != COMPCODE_ORD);
2294 /* In a short-circuited boolean expression the LHS might be
2295 such that the RHS, if evaluated, will never trap. For
2296 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2297 if neither x nor y is NaN. (This is a mixed blessing: for
2298 example, the expression above will never trap, hence
2299 optimizing it to x < y would be invalid). */
2300 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2301 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2302 rtrap = false;
2304 /* If the comparison was short-circuited, and only the RHS
2305 trapped, we may now generate a spurious trap. */
2306 if (rtrap && !ltrap
2307 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2308 return NULL_TREE;
2310 /* If we changed the conditions that cause a trap, we lose. */
2311 if ((ltrap || rtrap) != trap)
2312 return NULL_TREE;
2315 if (compcode == COMPCODE_TRUE)
2316 return constant_boolean_node (true, truth_type);
2317 else if (compcode == COMPCODE_FALSE)
2318 return constant_boolean_node (false, truth_type);
2319 else
2321 enum tree_code tcode;
2323 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2324 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
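/* For example (illustrative), on integral operands
   (x < y) || (x == y) combines via COMPCODE_LT | COMPCODE_EQ into
   COMPCODE_LE, i.e. the single test x <= y, while (x < y) && (x > y)
   combines to COMPCODE_FALSE and folds to constant false, the NaN and
   trap checks above permitting.  */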
2328 /* Return nonzero if two operands (typically of the same tree node)
2329 are necessarily equal. If either argument has side-effects this
2330 function returns zero. FLAGS modifies behavior as follows:
2332 If OEP_ONLY_CONST is set, only return nonzero for constants.
2333 This function tests whether the operands are indistinguishable;
2334 it does not test whether they are equal using C's == operation.
2335 The distinction is important for IEEE floating point, because
2336 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2337 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2339 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2340 even though it may hold multiple values during a function.
2341 This is because a GCC tree node guarantees that nothing else is
2342 executed between the evaluation of its "operands" (which may often
2343 be evaluated in arbitrary order). Hence if the operands themselves
2344 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2345 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2346 unset means assuming isochronic (or instantaneous) tree equivalence.
2347 Unless comparing arbitrary expression trees, such as from different
2348 statements, this flag can usually be left unset.
2350 If OEP_PURE_SAME is set, then pure functions with identical arguments
2351 are considered the same. It is used when the caller has other ways
2352 to ensure that global memory is unchanged in between. */
2354 int
2355 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2357 /* If either is ERROR_MARK, they aren't equal. */
2358 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2359 || TREE_TYPE (arg0) == error_mark_node
2360 || TREE_TYPE (arg1) == error_mark_node)
2361 return 0;
2363 /* Similar, if either does not have a type (like a released SSA name),
2364 they aren't equal. */
2365 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2366 return 0;
2368 /* Check equality of integer constants before bailing out due to
2369 precision differences. */
2370 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2371 return tree_int_cst_equal (arg0, arg1);
2373 /* If both types don't have the same signedness, then we can't consider
2374 them equal. We must check this before the STRIP_NOPS calls
2375 because they may change the signedness of the arguments. As pointers
2376 strictly don't have a signedness, require either two pointers or
2377 two non-pointers as well. */
2378 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2379 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2380 return 0;
2382 /* We cannot consider pointers to different address spaces equal. */
2383 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2384 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2385 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2386 return 0;
2388 /* If both types don't have the same precision, then it is not safe
2389 to strip NOPs. */
2390 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2391 return 0;
2393 STRIP_NOPS (arg0);
2394 STRIP_NOPS (arg1);
2396 /* In case both args are comparisons but with different comparison
2397 code, try to swap the comparison operands of one arg to produce
2398 a match and compare that variant. */
2399 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2400 && COMPARISON_CLASS_P (arg0)
2401 && COMPARISON_CLASS_P (arg1))
2403 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2405 if (TREE_CODE (arg0) == swap_code)
2406 return operand_equal_p (TREE_OPERAND (arg0, 0),
2407 TREE_OPERAND (arg1, 1), flags)
2408 && operand_equal_p (TREE_OPERAND (arg0, 1),
2409 TREE_OPERAND (arg1, 0), flags);
2412 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2413 /* This is needed for conversions and for COMPONENT_REF.
2414 Might as well play it safe and always test this. */
2415 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2416 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2417 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2418 return 0;
2420 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2421 We don't care about side effects in that case because the SAVE_EXPR
2422 takes care of that for us. In all other cases, two expressions are
2423 equal if they have no side effects. If we have two identical
2424 expressions with side effects that should be treated the same due
2425 to the only side effects being identical SAVE_EXPR's, that will
2426 be detected in the recursive calls below.
2427 If we are taking an invariant address of two identical objects
2428 they are necessarily equal as well. */
2429 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2430 && (TREE_CODE (arg0) == SAVE_EXPR
2431 || (flags & OEP_CONSTANT_ADDRESS_OF)
2432 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2433 return 1;
2435 /* Next handle constant cases, those for which we can return 1 even
2436 if ONLY_CONST is set. */
2437 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2438 switch (TREE_CODE (arg0))
2440 case INTEGER_CST:
2441 return tree_int_cst_equal (arg0, arg1);
2443 case FIXED_CST:
2444 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2445 TREE_FIXED_CST (arg1));
2447 case REAL_CST:
2448 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2449 TREE_REAL_CST (arg1)))
2450 return 1;
2453 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2455 /* If we do not distinguish between signed and unsigned zero,
2456 consider them equal. */
2457 if (real_zerop (arg0) && real_zerop (arg1))
2458 return 1;
2460 return 0;
2462 case VECTOR_CST:
2464 unsigned i;
2466 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2467 return 0;
2469 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2471 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2472 VECTOR_CST_ELT (arg1, i), flags))
2473 return 0;
2475 return 1;
2478 case COMPLEX_CST:
2479 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2480 flags)
2481 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2482 flags));
2484 case STRING_CST:
2485 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2486 && ! memcmp (TREE_STRING_POINTER (arg0),
2487 TREE_STRING_POINTER (arg1),
2488 TREE_STRING_LENGTH (arg0)));
2490 case ADDR_EXPR:
2491 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2492 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2493 ? OEP_CONSTANT_ADDRESS_OF : 0);
2494 default:
2495 break;
2498 if (flags & OEP_ONLY_CONST)
2499 return 0;
2501 /* Define macros to test an operand from arg0 and arg1 for equality and a
2502 variant that allows null and views null as being different from any
2503 non-null value. In the latter case, if either is null, they both
2504 must be; otherwise, do the normal comparison. */
2505 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2506 TREE_OPERAND (arg1, N), flags)
2508 #define OP_SAME_WITH_NULL(N) \
2509 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2510 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2512 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2514 case tcc_unary:
2515 /* Two conversions are equal only if signedness and modes match. */
2516 switch (TREE_CODE (arg0))
2518 CASE_CONVERT:
2519 case FIX_TRUNC_EXPR:
2520 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2521 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2522 return 0;
2523 break;
2524 default:
2525 break;
2528 return OP_SAME (0);
2531 case tcc_comparison:
2532 case tcc_binary:
2533 if (OP_SAME (0) && OP_SAME (1))
2534 return 1;
2536 /* For commutative ops, allow the other order. */
2537 return (commutative_tree_code (TREE_CODE (arg0))
2538 && operand_equal_p (TREE_OPERAND (arg0, 0),
2539 TREE_OPERAND (arg1, 1), flags)
2540 && operand_equal_p (TREE_OPERAND (arg0, 1),
2541 TREE_OPERAND (arg1, 0), flags));
2543 case tcc_reference:
2544 /* If either of the pointer (or reference) expressions we are
2545 dereferencing contain a side effect, these cannot be equal. */
2546 if (TREE_SIDE_EFFECTS (arg0)
2547 || TREE_SIDE_EFFECTS (arg1))
2548 return 0;
2550 switch (TREE_CODE (arg0))
2552 case INDIRECT_REF:
2553 case REALPART_EXPR:
2554 case IMAGPART_EXPR:
2555 return OP_SAME (0);
2557 case TARGET_MEM_REF:
2558 /* Require equal extra operands and then fall through to MEM_REF
2559 handling of the two common operands. */
2560 if (!OP_SAME_WITH_NULL (2)
2561 || !OP_SAME_WITH_NULL (3)
2562 || !OP_SAME_WITH_NULL (4))
2563 return 0;
2564 /* Fallthru. */
2565 case MEM_REF:
2566 /* Require equal access sizes, and similar pointer types.
2567 We can have incomplete types for array references of
2568 variable-sized arrays from the Fortran frontend
2569 though. */
2570 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2571 || (TYPE_SIZE (TREE_TYPE (arg0))
2572 && TYPE_SIZE (TREE_TYPE (arg1))
2573 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2574 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2575 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2576 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2577 && OP_SAME (0) && OP_SAME (1));
2579 case ARRAY_REF:
2580 case ARRAY_RANGE_REF:
2581 /* Operands 2 and 3 may be null.
2582 Compare the array index by value first if it is constant, as we
2583 may have different types but the same value here. */
2584 return (OP_SAME (0)
2585 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2586 TREE_OPERAND (arg1, 1))
2587 || OP_SAME (1))
2588 && OP_SAME_WITH_NULL (2)
2589 && OP_SAME_WITH_NULL (3));
2591 case COMPONENT_REF:
2592 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2593 may be NULL when we're called to compare MEM_EXPRs. */
2594 return OP_SAME_WITH_NULL (0)
2595 && OP_SAME (1)
2596 && OP_SAME_WITH_NULL (2);
2598 case BIT_FIELD_REF:
2599 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2601 default:
2602 return 0;
2605 case tcc_expression:
2606 switch (TREE_CODE (arg0))
2608 case ADDR_EXPR:
2609 case TRUTH_NOT_EXPR:
2610 return OP_SAME (0);
2612 case TRUTH_ANDIF_EXPR:
2613 case TRUTH_ORIF_EXPR:
2614 return OP_SAME (0) && OP_SAME (1);
2616 case FMA_EXPR:
2617 case WIDEN_MULT_PLUS_EXPR:
2618 case WIDEN_MULT_MINUS_EXPR:
2619 if (!OP_SAME (2))
2620 return 0;
2621 /* The multiplication operands are commutative. */
2622 /* FALLTHRU */
2624 case TRUTH_AND_EXPR:
2625 case TRUTH_OR_EXPR:
2626 case TRUTH_XOR_EXPR:
2627 if (OP_SAME (0) && OP_SAME (1))
2628 return 1;
2630 /* Otherwise take into account this is a commutative operation. */
2631 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2632 TREE_OPERAND (arg1, 1), flags)
2633 && operand_equal_p (TREE_OPERAND (arg0, 1),
2634 TREE_OPERAND (arg1, 0), flags));
2636 case COND_EXPR:
2637 case VEC_COND_EXPR:
2638 case DOT_PROD_EXPR:
2639 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2641 default:
2642 return 0;
2645 case tcc_vl_exp:
2646 switch (TREE_CODE (arg0))
2648 case CALL_EXPR:
2649 /* If the CALL_EXPRs call different functions, then they
2650 clearly cannot be equal. */
2651 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2652 flags))
2653 return 0;
2656 unsigned int cef = call_expr_flags (arg0);
2657 if (flags & OEP_PURE_SAME)
2658 cef &= ECF_CONST | ECF_PURE;
2659 else
2660 cef &= ECF_CONST;
2661 if (!cef)
2662 return 0;
2665 /* Now see if all the arguments are the same. */
2667 const_call_expr_arg_iterator iter0, iter1;
2668 const_tree a0, a1;
2669 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2670 a1 = first_const_call_expr_arg (arg1, &iter1);
2671 a0 && a1;
2672 a0 = next_const_call_expr_arg (&iter0),
2673 a1 = next_const_call_expr_arg (&iter1))
2674 if (! operand_equal_p (a0, a1, flags))
2675 return 0;
2677 /* If we get here and both argument lists are exhausted
2678 then the CALL_EXPRs are equal. */
2679 return ! (a0 || a1);
2681 default:
2682 return 0;
2685 case tcc_declaration:
2686 /* Consider __builtin_sqrt equal to sqrt. */
2687 return (TREE_CODE (arg0) == FUNCTION_DECL
2688 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2689 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2690 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2692 default:
2693 return 0;
2696 #undef OP_SAME
2697 #undef OP_SAME_WITH_NULL
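/* Illustrative examples, not from the original source: operand_equal_p
   considers "a + b" and "b + a" equal because PLUS_EXPR is commutative,
   and a VAR_DECL equal to itself when OEP_ONLY_CONST is unset; with
   OEP_ONLY_CONST set the same VAR_DECL compares unequal to itself,
   since with that flag only constants may match.  */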
2700 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2701 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2703 When in doubt, return 0. */
2705 static int
2706 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2708 int unsignedp1, unsignedpo;
2709 tree primarg0, primarg1, primother;
2710 unsigned int correct_width;
2712 if (operand_equal_p (arg0, arg1, 0))
2713 return 1;
2715 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2716 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2717 return 0;
2719 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2720 and see if the inner values are the same. This removes any
2721 signedness comparison, which doesn't matter here. */
2722 primarg0 = arg0, primarg1 = arg1;
2723 STRIP_NOPS (primarg0);
2724 STRIP_NOPS (primarg1);
2725 if (operand_equal_p (primarg0, primarg1, 0))
2726 return 1;
2728 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2729 actual comparison operand, ARG0.
2731 First throw away any conversions to wider types
2732 already present in the operands. */
2734 primarg1 = get_narrower (arg1, &unsignedp1);
2735 primother = get_narrower (other, &unsignedpo);
2737 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2738 if (unsignedp1 == unsignedpo
2739 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2740 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2742 tree type = TREE_TYPE (arg0);
2744 /* Make sure shorter operand is extended the right way
2745 to match the longer operand. */
2746 primarg1 = fold_convert (signed_or_unsigned_type_for
2747 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2749 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2750 return 1;
2753 return 0;
2756 /* See if ARG is an expression that is either a comparison or is performing
2757 arithmetic on comparisons. The comparisons must only be comparing
2758 two different values, which will be stored in *CVAL1 and *CVAL2; if
2759 they are nonzero it means that some operands have already been found.
2760 No variables may be used anywhere else in the expression except in the
2761 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2762 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2764 If this is true, return 1. Otherwise, return zero. */
2766 static int
2767 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2769 enum tree_code code = TREE_CODE (arg);
2770 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2772 /* We can handle some of the tcc_expression cases here. */
2773 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2774 tclass = tcc_unary;
2775 else if (tclass == tcc_expression
2776 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2777 || code == COMPOUND_EXPR))
2778 tclass = tcc_binary;
2780 else if (tclass == tcc_expression && code == SAVE_EXPR
2781 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2783 /* If we've already found a CVAL1 or CVAL2, this expression is
2784 too complex to handle. */
2785 if (*cval1 || *cval2)
2786 return 0;
2788 tclass = tcc_unary;
2789 *save_p = 1;
2792 switch (tclass)
2794 case tcc_unary:
2795 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2797 case tcc_binary:
2798 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2799 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2800 cval1, cval2, save_p));
2802 case tcc_constant:
2803 return 1;
2805 case tcc_expression:
2806 if (code == COND_EXPR)
2807 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2808 cval1, cval2, save_p)
2809 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2810 cval1, cval2, save_p)
2811 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2812 cval1, cval2, save_p));
2813 return 0;
2815 case tcc_comparison:
2816 /* First see if we can handle the first operand, then the second. For
2817 the second operand, we know *CVAL1 can't be zero. It must be that
2818 one side of the comparison is each of the values; test for the
2819 case where this isn't true by failing if the two operands
2820 are the same. */
2822 if (operand_equal_p (TREE_OPERAND (arg, 0),
2823 TREE_OPERAND (arg, 1), 0))
2824 return 0;
2826 if (*cval1 == 0)
2827 *cval1 = TREE_OPERAND (arg, 0);
2828 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2829 ;
2830 else if (*cval2 == 0)
2831 *cval2 = TREE_OPERAND (arg, 0);
2832 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2833 ;
2834 else
2835 return 0;
2837 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2838 ;
2839 else if (*cval2 == 0)
2840 *cval2 = TREE_OPERAND (arg, 1);
2841 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2842 ;
2843 else
2844 return 0;
2846 return 1;
2848 default:
2849 return 0;
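/* For example (illustrative), for ARG == "(a < b) && (b > a)" the walk
   above records a in *CVAL1 and b in *CVAL2 and returns 1, whereas
   "(a < b) && (c > d)" returns 0 because a third distinct value turns
   up in the second comparison.  */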
2853 /* ARG is a tree that is known to contain just arithmetic operations and
2854 comparisons. Evaluate the operations in the tree substituting NEW0 for
2855 any occurrence of OLD0 as an operand of a comparison and likewise for
2856 NEW1 and OLD1. */
2858 static tree
2859 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2860 tree old1, tree new1)
2862 tree type = TREE_TYPE (arg);
2863 enum tree_code code = TREE_CODE (arg);
2864 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2866 /* We can handle some of the tcc_expression cases here. */
2867 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2868 tclass = tcc_unary;
2869 else if (tclass == tcc_expression
2870 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2871 tclass = tcc_binary;
2873 switch (tclass)
2875 case tcc_unary:
2876 return fold_build1_loc (loc, code, type,
2877 eval_subst (loc, TREE_OPERAND (arg, 0),
2878 old0, new0, old1, new1));
2880 case tcc_binary:
2881 return fold_build2_loc (loc, code, type,
2882 eval_subst (loc, TREE_OPERAND (arg, 0),
2883 old0, new0, old1, new1),
2884 eval_subst (loc, TREE_OPERAND (arg, 1),
2885 old0, new0, old1, new1));
2887 case tcc_expression:
2888 switch (code)
2890 case SAVE_EXPR:
2891 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2892 old1, new1);
2894 case COMPOUND_EXPR:
2895 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2896 old1, new1);
2898 case COND_EXPR:
2899 return fold_build3_loc (loc, code, type,
2900 eval_subst (loc, TREE_OPERAND (arg, 0),
2901 old0, new0, old1, new1),
2902 eval_subst (loc, TREE_OPERAND (arg, 1),
2903 old0, new0, old1, new1),
2904 eval_subst (loc, TREE_OPERAND (arg, 2),
2905 old0, new0, old1, new1));
2906 default:
2907 break;
2909 /* Fall through - ??? */
2911 case tcc_comparison:
2913 tree arg0 = TREE_OPERAND (arg, 0);
2914 tree arg1 = TREE_OPERAND (arg, 1);
2916 /* We need to check both for exact equality and tree equality. The
2917 former will be true if the operand has a side-effect. In that
2918 case, we know the operand occurred exactly once. */
2920 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2921 arg0 = new0;
2922 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2923 arg0 = new1;
2925 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2926 arg1 = new0;
2927 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2928 arg1 = new1;
2930 return fold_build2_loc (loc, code, type, arg0, arg1);
2933 default:
2934 return arg;
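/* Illustrative example: eval_subst on "(a == b) || (a == c)" with
   OLD0 == a, NEW0 == x, OLD1 == b, NEW1 == y rebuilds the tree as
   "(x == y) || (x == c)", substituting only within the comparisons.  */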
2938 /* Return a tree for the case when the result of an expression is RESULT
2939 converted to TYPE and OMITTED was previously an operand of the expression
2940 but is now not needed (e.g., we folded OMITTED * 0).
2942 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2943 the conversion of RESULT to TYPE. */
2945 tree
2946 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2948 tree t = fold_convert_loc (loc, type, result);
2950 /* If the resulting operand is an empty statement, just return the omitted
2951 statement cast to void. */
2952 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2953 return build1_loc (loc, NOP_EXPR, void_type_node,
2954 fold_ignored_result (omitted));
2956 if (TREE_SIDE_EFFECTS (omitted))
2957 return build2_loc (loc, COMPOUND_EXPR, type,
2958 fold_ignored_result (omitted), t);
2960 return non_lvalue_loc (loc, t);
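/* For instance (illustrative), after folding "f () * 0" to zero the
   caller passes RESULT == integer_zero_node and OMITTED == the call,
   producing the COMPOUND_EXPR "(f (), 0)" so that the call's side
   effects are still evaluated.  */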
2963 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2965 static tree
2966 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2967 tree omitted)
2969 tree t = fold_convert_loc (loc, type, result);
2971 /* If the resulting operand is an empty statement, just return the omitted
2972 statement cast to void. */
2973 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2974 return build1_loc (loc, NOP_EXPR, void_type_node,
2975 fold_ignored_result (omitted));
2977 if (TREE_SIDE_EFFECTS (omitted))
2978 return build2_loc (loc, COMPOUND_EXPR, type,
2979 fold_ignored_result (omitted), t);
2981 return pedantic_non_lvalue_loc (loc, t);
2984 /* Return a tree for the case when the result of an expression is RESULT
2985 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2986 of the expression but are now not needed.
2988 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2989 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2990 evaluated before OMITTED2. Otherwise, if neither has side effects,
2991 just do the conversion of RESULT to TYPE. */
2993 tree
2994 omit_two_operands_loc (location_t loc, tree type, tree result,
2995 tree omitted1, tree omitted2)
2997 tree t = fold_convert_loc (loc, type, result);
2999 if (TREE_SIDE_EFFECTS (omitted2))
3000 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3001 if (TREE_SIDE_EFFECTS (omitted1))
3002 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3004 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3008 /* Return a simplified tree node for the truth-negation of ARG. This
3009 never alters ARG itself. We assume that ARG is an operation that
3010 returns a truth value (0 or 1).
3012 FIXME: one would think we would fold the result, but it causes
3013 problems with the dominator optimizer. */
3015 tree
3016 fold_truth_not_expr (location_t loc, tree arg)
3018 tree type = TREE_TYPE (arg);
3019 enum tree_code code = TREE_CODE (arg);
3020 location_t loc1, loc2;
3022 /* If this is a comparison, we can simply invert it, except for
3023 floating-point non-equality comparisons, in which case we just
3024 enclose a TRUTH_NOT_EXPR around what we have. */
3026 if (TREE_CODE_CLASS (code) == tcc_comparison)
3028 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3029 if (FLOAT_TYPE_P (op_type)
3030 && flag_trapping_math
3031 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3032 && code != NE_EXPR && code != EQ_EXPR)
3033 return NULL_TREE;
3035 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3036 if (code == ERROR_MARK)
3037 return NULL_TREE;
3039 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3040 TREE_OPERAND (arg, 1));
3043 switch (code)
3045 case INTEGER_CST:
3046 return constant_boolean_node (integer_zerop (arg), type);
3048 case TRUTH_AND_EXPR:
3049 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3050 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3051 return build2_loc (loc, TRUTH_OR_EXPR, type,
3052 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3053 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3055 case TRUTH_OR_EXPR:
3056 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3057 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3058 return build2_loc (loc, TRUTH_AND_EXPR, type,
3059 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3060 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3062 case TRUTH_XOR_EXPR:
3063 /* Here we can invert either operand. We invert the first operand
3064 unless the second operand is a TRUTH_NOT_EXPR in which case our
3065 result is the XOR of the first operand with the inside of the
3066 negation of the second operand. */
3068 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3069 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3070 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3071 else
3072 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3073 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3074 TREE_OPERAND (arg, 1));
3076 case TRUTH_ANDIF_EXPR:
3077 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3078 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3079 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3080 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3081 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3083 case TRUTH_ORIF_EXPR:
3084 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3085 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3086 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3087 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3088 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3090 case TRUTH_NOT_EXPR:
3091 return TREE_OPERAND (arg, 0);
3093 case COND_EXPR:
3095 tree arg1 = TREE_OPERAND (arg, 1);
3096 tree arg2 = TREE_OPERAND (arg, 2);
3098 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3099 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3101 /* A COND_EXPR may have a throw as one operand, which
3102 then has void type. Just leave void operands
3103 as they are. */
3104 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3105 VOID_TYPE_P (TREE_TYPE (arg1))
3106 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3107 VOID_TYPE_P (TREE_TYPE (arg2))
3108 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3111 case COMPOUND_EXPR:
3112 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3113 return build2_loc (loc, COMPOUND_EXPR, type,
3114 TREE_OPERAND (arg, 0),
3115 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3117 case NON_LVALUE_EXPR:
3118 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3119 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3121 CASE_CONVERT:
3122 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3123 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3125 /* ... fall through ... */
3127 case FLOAT_EXPR:
3128 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3129 return build1_loc (loc, TREE_CODE (arg), type,
3130 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3132 case BIT_AND_EXPR:
3133 if (!integer_onep (TREE_OPERAND (arg, 1)))
3134 return NULL_TREE;
3135 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3137 case SAVE_EXPR:
3138 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3140 case CLEANUP_POINT_EXPR:
3141 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3142 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3143 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3145 default:
3146 return NULL_TREE;
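/* Illustrative examples: fold_truth_not_expr rewrites "a && b" as
   "!a || !b" per De Morgan, inverts "x < y" to "x >= y" (or to
   UNGE_EXPR when NaNs must be honored), and returns NULL_TREE when no
   simplification applies, leaving invert_truthvalue_loc below to wrap
   a TRUTH_NOT_EXPR.  */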
3150 /* Return a simplified tree node for the truth-negation of ARG. This
3151 never alters ARG itself. We assume that ARG is an operation that
3152 returns a truth value (0 or 1).
3154 FIXME: one would think we would fold the result, but it causes
3155 problems with the dominator optimizer. */
3157 tree
3158 invert_truthvalue_loc (location_t loc, tree arg)
3160 tree tem;
3162 if (TREE_CODE (arg) == ERROR_MARK)
3163 return arg;
3165 tem = fold_truth_not_expr (loc, arg);
3166 if (!tem)
3167 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3169 return tem;
3172 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3173 operands are another bit-wise operation with a common input. If so,
3174 distribute the bit operations to save an operation and possibly two if
3175 constants are involved. For example, convert
3176 (A | B) & (A | C) into A | (B & C)
3177 Further simplification will occur if B and C are constants.
3179 If this optimization cannot be done, 0 will be returned. */
3181 static tree
3182 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3183 tree arg0, tree arg1)
3185 tree common;
3186 tree left, right;
3188 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3189 || TREE_CODE (arg0) == code
3190 || (TREE_CODE (arg0) != BIT_AND_EXPR
3191 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3192 return 0;
3194 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3196 common = TREE_OPERAND (arg0, 0);
3197 left = TREE_OPERAND (arg0, 1);
3198 right = TREE_OPERAND (arg1, 1);
3200 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3202 common = TREE_OPERAND (arg0, 0);
3203 left = TREE_OPERAND (arg0, 1);
3204 right = TREE_OPERAND (arg1, 0);
3206 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3208 common = TREE_OPERAND (arg0, 1);
3209 left = TREE_OPERAND (arg0, 0);
3210 right = TREE_OPERAND (arg1, 1);
3212 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3214 common = TREE_OPERAND (arg0, 1);
3215 left = TREE_OPERAND (arg0, 0);
3216 right = TREE_OPERAND (arg1, 0);
3218 else
3219 return 0;
3221 common = fold_convert_loc (loc, type, common);
3222 left = fold_convert_loc (loc, type, left);
3223 right = fold_convert_loc (loc, type, right);
3224 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3225 fold_build2_loc (loc, code, type, left, right));
3228 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3229 with code CODE. This optimization is unsafe. */
3230 static tree
3231 distribute_real_division (location_t loc, enum tree_code code, tree type,
3232 tree arg0, tree arg1)
3234 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3235 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3237 /* (A / C) +- (B / C) -> (A +- B) / C. */
3238 if (mul0 == mul1
3239 && operand_equal_p (TREE_OPERAND (arg0, 1),
3240 TREE_OPERAND (arg1, 1), 0))
3241 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3242 fold_build2_loc (loc, code, type,
3243 TREE_OPERAND (arg0, 0),
3244 TREE_OPERAND (arg1, 0)),
3245 TREE_OPERAND (arg0, 1));
3247 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3248 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3249 TREE_OPERAND (arg1, 0), 0)
3250 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3251 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3253 REAL_VALUE_TYPE r0, r1;
3254 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3255 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3256 if (!mul0)
3257 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3258 if (!mul1)
3259 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3260 real_arithmetic (&r0, code, &r0, &r1);
3261 return fold_build2_loc (loc, MULT_EXPR, type,
3262 TREE_OPERAND (arg0, 0),
3263 build_real (type, r0));
3266 return NULL_TREE;
3269 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3270 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3272 static tree
3273 make_bit_field_ref (location_t loc, tree inner, tree type,
3274 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3276 tree result, bftype;
3278 if (bitpos == 0)
3280 tree size = TYPE_SIZE (TREE_TYPE (inner));
3281 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3282 || POINTER_TYPE_P (TREE_TYPE (inner)))
3283 && host_integerp (size, 0)
3284 && tree_low_cst (size, 0) == bitsize)
3285 return fold_convert_loc (loc, type, inner);
3288 bftype = type;
3289 if (TYPE_PRECISION (bftype) != bitsize
3290 || TYPE_UNSIGNED (bftype) == !unsignedp)
3291 bftype = build_nonstandard_integer_type (bitsize, 0);
3293 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3294 size_int (bitsize), bitsize_int (bitpos));
3296 if (bftype != type)
3297 result = fold_convert_loc (loc, type, result);
3299 return result;
3302 /* Optimize a bit-field compare.
3304 There are two cases: First is a compare against a constant and the
3305 second is a comparison of two items where the fields are at the same
3306 bit position relative to the start of a chunk (byte, halfword, word)
3307 large enough to contain it. In these cases we can avoid the shift
3308 implicit in bitfield extractions.
3310 For constants, we emit a compare of the shifted constant with the
3311 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3312 compared. For two fields at the same position, we do the ANDs with the
3313 similar mask and compare the result of the ANDs.
3315 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3316 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3317 are the left and right operands of the comparison, respectively.
3319 If the optimization described above can be done, we return the resulting
3320 tree. Otherwise we return zero. */
3322 static tree
3323 optimize_bit_field_compare (location_t loc, enum tree_code code,
3324 tree compare_type, tree lhs, tree rhs)
3326 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3327 tree type = TREE_TYPE (lhs);
3328 tree signed_type, unsigned_type;
3329 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3330 enum machine_mode lmode, rmode, nmode;
3331 int lunsignedp, runsignedp;
3332 int lvolatilep = 0, rvolatilep = 0;
3333 tree linner, rinner = NULL_TREE;
3334 tree mask;
3335 tree offset;
3337 /* In the strict volatile bitfields case, doing code changes here may prevent
3338 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3339 if (flag_strict_volatile_bitfields > 0)
3340 return 0;
3342 /* Get all the information about the extractions being done. If the bit size
3343 is the same as the size of the underlying object, we aren't doing an
3344 extraction at all and so can do nothing. We also don't want to
3345 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3346 then will no longer be able to replace it. */
3347 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3348 &lunsignedp, &lvolatilep, false);
3349 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3350 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3351 return 0;
3353 if (!const_p)
3355 /* If this is not a constant, we can only do something if bit positions,
3356 sizes, and signedness are the same. */
3357 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3358 &runsignedp, &rvolatilep, false);
3360 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3361 || lunsignedp != runsignedp || offset != 0
3362 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3363 return 0;
3366 /* See if we can find a mode to refer to this field. We should be able to,
3367 but fail if we can't. */
3368 if (lvolatilep
3369 && GET_MODE_BITSIZE (lmode) > 0
3370 && flag_strict_volatile_bitfields > 0)
3371 nmode = lmode;
3372 else
3373 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3374 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3375 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3376 TYPE_ALIGN (TREE_TYPE (rinner))),
3377 word_mode, lvolatilep || rvolatilep);
3378 if (nmode == VOIDmode)
3379 return 0;
3381 /* Set signed and unsigned types of the precision of this mode for the
3382 shifts below. */
3383 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3384 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3386 /* Compute the bit position and size for the new reference and our offset
3387 within it. If the new reference is the same size as the original, we
3388 won't optimize anything, so return zero. */
3389 nbitsize = GET_MODE_BITSIZE (nmode);
3390 nbitpos = lbitpos & ~ (nbitsize - 1);
3391 lbitpos -= nbitpos;
3392 if (nbitsize == lbitsize)
3393 return 0;
3395 if (BYTES_BIG_ENDIAN)
3396 lbitpos = nbitsize - lbitsize - lbitpos;
3398 /* Make the mask to be used against the extracted field. */
3399 mask = build_int_cst_type (unsigned_type, -1);
3400 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3401 mask = const_binop (RSHIFT_EXPR, mask,
3402 size_int (nbitsize - lbitsize - lbitpos));
3404 if (! const_p)
3405 /* If not comparing with a constant, just rework the comparison
3406 and return. */
3407 return fold_build2_loc (loc, code, compare_type,
3408 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3409 make_bit_field_ref (loc, linner,
3410 unsigned_type,
3411 nbitsize, nbitpos,
3412 1),
3413 mask),
3414 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3415 make_bit_field_ref (loc, rinner,
3416 unsigned_type,
3417 nbitsize, nbitpos,
3418 1),
3419 mask));
3421 /* Otherwise, we are handling the constant case. See if the constant is too
3422 big for the field. Warn and return a tree for 0 (false) if so. We do
3423 this not only for its own sake, but to avoid having to test for this
3424 error case below. If we didn't, we might generate wrong code.
3426 For unsigned fields, the constant shifted right by the field length should
3427 be all zero. For signed fields, the high-order bits should agree with
3428 the sign bit. */
3430 if (lunsignedp)
3432 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3433 fold_convert_loc (loc,
3434 unsigned_type, rhs),
3435 size_int (lbitsize))))
3437 warning (0, "comparison is always %d due to width of bit-field",
3438 code == NE_EXPR);
3439 return constant_boolean_node (code == NE_EXPR, compare_type);
3442 else
3444 tree tem = const_binop (RSHIFT_EXPR,
3445 fold_convert_loc (loc, signed_type, rhs),
3446 size_int (lbitsize - 1));
3447 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3449 warning (0, "comparison is always %d due to width of bit-field",
3450 code == NE_EXPR);
3451 return constant_boolean_node (code == NE_EXPR, compare_type);
3455 /* Single-bit compares should always be against zero. */
3456 if (lbitsize == 1 && ! integer_zerop (rhs))
3458 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3459 rhs = build_int_cst (type, 0);
3462 /* Make a new bitfield reference, shift the constant over the
3463 appropriate number of bits and mask it with the computed mask
3464 (in case this was a signed field). If we changed it, make a new one. */
3465 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3466 if (lvolatilep)
3468 TREE_SIDE_EFFECTS (lhs) = 1;
3469 TREE_THIS_VOLATILE (lhs) = 1;
3472 rhs = const_binop (BIT_AND_EXPR,
3473 const_binop (LSHIFT_EXPR,
3474 fold_convert_loc (loc, unsigned_type, rhs),
3475 size_int (lbitpos)),
3476 mask);
3478 lhs = build2_loc (loc, code, compare_type,
3479 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3480 return lhs;
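/* Sketch of the effect (illustrative only): given
   "struct S { unsigned f : 3; } s;" the test "s.f == 5" becomes
   roughly "(WORD & MASK) == ((unsigned) 5 << SHIFT)", where WORD is a
   mode-sized BIT_FIELD_REF; the exact mask, shift and mode depend on
   endianness and the get_best_mode choice above.  */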
3483 /* Subroutine for fold_truth_andor_1: decode a field reference.
3485 If EXP is a comparison reference, we return the innermost reference.
3487 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3488 set to the starting bit number.
3490 If the innermost field can be completely contained in a mode-sized
3491 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3493 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3494 otherwise it is not changed.
3496 *PUNSIGNEDP is set to the signedness of the field.
3498 *PMASK is set to the mask used. This is either contained in a
3499 BIT_AND_EXPR or derived from the width of the field.
3501 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3503 Return 0 if this is not a component reference or is one that we can't
3504 do anything with. */
3506 static tree
3507 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3508 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3509 int *punsignedp, int *pvolatilep,
3510 tree *pmask, tree *pand_mask)
3512 tree outer_type = 0;
3513 tree and_mask = 0;
3514 tree mask, inner, offset;
3515 tree unsigned_type;
3516 unsigned int precision;
3518 /* All the optimizations using this function assume integer fields.
3519 There are problems with FP fields since the type_for_size call
3520 below can fail for, e.g., XFmode. */
3521 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3522 return 0;
3524 /* We are interested in the bare arrangement of bits, so strip everything
3525 that doesn't affect the machine mode. However, record the type of the
3526 outermost expression if it may matter below. */
3527 if (CONVERT_EXPR_P (exp)
3528 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3529 outer_type = TREE_TYPE (exp);
3530 STRIP_NOPS (exp);
3532 if (TREE_CODE (exp) == BIT_AND_EXPR)
3534 and_mask = TREE_OPERAND (exp, 1);
3535 exp = TREE_OPERAND (exp, 0);
3536 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3537 if (TREE_CODE (and_mask) != INTEGER_CST)
3538 return 0;
3541 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3542 punsignedp, pvolatilep, false);
3543 if ((inner == exp && and_mask == 0)
3544 || *pbitsize < 0 || offset != 0
3545 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3546 return 0;
3548 /* If the number of bits in the reference is the same as the bitsize of
3549 the outer type, then the outer type gives the signedness. Otherwise
3550 (in case of a small bitfield) the signedness is unchanged. */
3551 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3552 *punsignedp = TYPE_UNSIGNED (outer_type);
3554 /* Compute the mask to access the bitfield. */
3555 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3556 precision = TYPE_PRECISION (unsigned_type);
3558 mask = build_int_cst_type (unsigned_type, -1);
3560 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3561 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3563 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3564 if (and_mask != 0)
3565 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3566 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3568 *pmask = mask;
3569 *pand_mask = and_mask;
3570 return inner;
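/* For example (illustrative), for EXP == "s.f & 3" the BIT_AND_EXPR
   branch above records 3 in *PAND_MASK, get_inner_reference decodes
   the field s.f, and *PMASK ends up as the field-width mask ANDed
   with 3.  */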
3573 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3574 bit positions. */
3576 static int
3577 all_ones_mask_p (const_tree mask, int size)
3579 tree type = TREE_TYPE (mask);
3580 unsigned int precision = TYPE_PRECISION (type);
3581 tree tmask;
3583 tmask = build_int_cst_type (signed_type_for (type), -1);
3585 return
3586 tree_int_cst_equal (mask,
3587 const_binop (RSHIFT_EXPR,
3588 const_binop (LSHIFT_EXPR, tmask,
3589 size_int (precision - size)),
3590 size_int (precision - size)));
3593 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3594 represents the sign bit of EXP's type. If EXP represents a sign
3595 or zero extension, also test VAL against the unextended type.
3596 The return value is the (sub)expression whose sign bit is VAL,
3597 or NULL_TREE otherwise. */
3599 static tree
3600 sign_bit_p (tree exp, const_tree val)
3602 unsigned HOST_WIDE_INT mask_lo, lo;
3603 HOST_WIDE_INT mask_hi, hi;
3604 int width;
3605 tree t;
3607 /* Tree EXP must have an integral type. */
3608 t = TREE_TYPE (exp);
3609 if (! INTEGRAL_TYPE_P (t))
3610 return NULL_TREE;
3612 /* Tree VAL must be an integer constant. */
3613 if (TREE_CODE (val) != INTEGER_CST
3614 || TREE_OVERFLOW (val))
3615 return NULL_TREE;
3617 width = TYPE_PRECISION (t);
3618 if (width > HOST_BITS_PER_WIDE_INT)
3620 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3621 lo = 0;
3623 mask_hi = ((unsigned HOST_WIDE_INT) -1
3624 >> (HOST_BITS_PER_DOUBLE_INT - width));
3625 mask_lo = -1;
3627 else
3629 hi = 0;
3630 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3632 mask_hi = 0;
3633 mask_lo = ((unsigned HOST_WIDE_INT) -1
3634 >> (HOST_BITS_PER_WIDE_INT - width));
3637 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3638 treat VAL as if it were unsigned. */
3639 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3640 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3641 return exp;
3643 /* Handle extension from a narrower type. */
3644 if (TREE_CODE (exp) == NOP_EXPR
3645 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3646 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3648 return NULL_TREE;
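/* For example (illustrative), with EXP of 32-bit signed type,
   sign_bit_p returns EXP when VAL is 0x80000000, and for a widening
   NOP_EXPR it recurses so that VAL may instead match the sign bit of
   the narrower inner type.  */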
3651 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3652 to be evaluated unconditionally. */
3654 static int
3655 simple_operand_p (const_tree exp)
3657 /* Strip any conversions that don't change the machine mode. */
3658 STRIP_NOPS (exp);
3660 return (CONSTANT_CLASS_P (exp)
3661 || TREE_CODE (exp) == SSA_NAME
3662 || (DECL_P (exp)
3663 && ! TREE_ADDRESSABLE (exp)
3664 && ! TREE_THIS_VOLATILE (exp)
3665 && ! DECL_NONLOCAL (exp)
3666 /* Don't regard global variables as simple. They may be
3667 allocated in ways unknown to the compiler (shared memory,
3668 #pragma weak, etc). */
3669 && ! TREE_PUBLIC (exp)
3670 && ! DECL_EXTERNAL (exp)
3671 /* Loading a static variable is unduly expensive, but global
3672 registers aren't expensive. */
3673 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3676 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3677 to be evaluated unconditionally.
3678 In addition to simple_operand_p, we assume that comparisons, conversions,
3679 and logic-not operations are simple if their operands are simple, too. */
3681 static bool
3682 simple_operand_p_2 (tree exp)
3684 enum tree_code code;
3686 if (TREE_SIDE_EFFECTS (exp)
3687 || tree_could_trap_p (exp))
3688 return false;
3690 while (CONVERT_EXPR_P (exp))
3691 exp = TREE_OPERAND (exp, 0);
3693 code = TREE_CODE (exp);
3695 if (TREE_CODE_CLASS (code) == tcc_comparison)
3696 return (simple_operand_p (TREE_OPERAND (exp, 0))
3697 && simple_operand_p (TREE_OPERAND (exp, 1)));
3699 if (code == TRUTH_NOT_EXPR)
3700 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3702 return simple_operand_p (exp);
3706 /* The following functions are subroutines to fold_range_test and allow it to
3707 try to change a logical combination of comparisons into a range test.
3709 For example, both
3710 X == 2 || X == 3 || X == 4 || X == 5
3711 and
3712 X >= 2 && X <= 5
3713 are converted to
3714 (unsigned) (X - 2) <= 3
3716 We describe each set of comparisons as being either inside or outside
3717 a range, using a variable named like IN_P, and then describe the
3718 range with a lower and upper bound. If one of the bounds is omitted,
3719 it represents either the highest or lowest value of the type.
3721 In the comments below, we represent a range by two numbers in brackets
3722 preceded by a "+" to designate being inside that range, or a "-" to
3723 designate being outside that range, so the condition can be inverted by
3724 flipping the prefix. An omitted bound is represented by a "-". For
3725 example, "- [-, 10]" means being outside the range starting at the lowest
3726 possible value and ending at 10, in other words, being greater than 10.
3727 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3728 always false.
3730 We set up things so that the missing bounds are handled in a consistent
3731 manner so neither a missing bound nor "true" and "false" need to be
3732 handled using a special case. */
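/* An illustrative sketch of the notation (not from the original
   source): X == 2 || X == 3 || X == 4 || X == 5 is the range
   "+ [2, 5]"; its negation is "- [2, 5]"; X > 10 can be written
   either "- [-, 10]" or "+ [11, -]".  The check for "+ [2, 5]" is
   emitted as (unsigned) (X - 2) <= 3, since for X < 2 the unsigned
   subtraction wraps to a value far above 3.  */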
3734 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3735 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3736 and UPPER1_P are nonzero if the respective argument is an upper bound
3737 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3738 must be specified for a comparison. ARG1 will be converted to ARG0's
3739 type if both are specified. */
3741 static tree
3742 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3743 tree arg1, int upper1_p)
3745 tree tem;
3746 int result;
3747 int sgn0, sgn1;
3749 /* If neither arg represents infinity, do the normal operation.
3750 Else, if not a comparison, return infinity. Else handle the special
3751 comparison rules. Note that most of the cases below won't occur, but
3752 are handled for consistency. */
3754 if (arg0 != 0 && arg1 != 0)
3756 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3757 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3758 STRIP_NOPS (tem);
3759 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3762 if (TREE_CODE_CLASS (code) != tcc_comparison)
3763 return 0;
3765 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3766 for neither. In real maths, we cannot assume open-ended ranges are
3767 the same. But this is computer arithmetic, where numbers are finite.
3768 We can therefore substitute for any unbounded bound a value Z,
3769 Z being greater than any representable number. This permits
3770 us to treat unbounded ranges as equal. */
3771 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3772 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3773 switch (code)
3775 case EQ_EXPR:
3776 result = sgn0 == sgn1;
3777 break;
3778 case NE_EXPR:
3779 result = sgn0 != sgn1;
3780 break;
3781 case LT_EXPR:
3782 result = sgn0 < sgn1;
3783 break;
3784 case LE_EXPR:
3785 result = sgn0 <= sgn1;
3786 break;
3787 case GT_EXPR:
3788 result = sgn0 > sgn1;
3789 break;
3790 case GE_EXPR:
3791 result = sgn0 >= sgn1;
3792 break;
3793 default:
3794 gcc_unreachable ();
3797 return constant_boolean_node (result, type);
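/* A worked example of the infinity handling above (an illustration,
   not from the original source): an omitted lower bound gets SGN == -1
   and an omitted upper bound SGN == 1, so comparing an omitted lower
   bound against any finite constant (SGN == 0) with LT_EXPR yields
   true, and comparing two omitted upper bounds with EQ_EXPR also
   yields true; unbounded bounds thus compare as equal.  */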
3800 /* Helper routine for make_range. Perform one step for it, return
3801 new expression if the loop should continue or NULL_TREE if it should
3802 stop. */
3804 tree
3805 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3806 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3807 bool *strict_overflow_p)
3809 tree arg0_type = TREE_TYPE (arg0);
3810 tree n_low, n_high, low = *p_low, high = *p_high;
3811 int in_p = *p_in_p, n_in_p;
3813 switch (code)
3815 case TRUTH_NOT_EXPR:
3816 /* We can only do something if the range is testing for zero. */
3817 if (low == NULL_TREE || high == NULL_TREE
3818 || ! integer_zerop (low) || ! integer_zerop (high))
3819 return NULL_TREE;
3820 *p_in_p = ! in_p;
3821 return arg0;
3823 case EQ_EXPR: case NE_EXPR:
3824 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3825 /* We can only do something if the range is testing for zero
3826 and if the second operand is an integer constant. Note that
3827 saying something is "in" the range we make is done by
3828 complementing IN_P, since IN_P is set for the initial case of
3829 being not equal to zero; "out" is leaving it alone. */
3830 if (low == NULL_TREE || high == NULL_TREE
3831 || ! integer_zerop (low) || ! integer_zerop (high)
3832 || TREE_CODE (arg1) != INTEGER_CST)
3833 return NULL_TREE;
3835 switch (code)
3837 case NE_EXPR: /* - [c, c] */
3838 low = high = arg1;
3839 break;
3840 case EQ_EXPR: /* + [c, c] */
3841 in_p = ! in_p, low = high = arg1;
3842 break;
3843 case GT_EXPR: /* - [-, c] */
3844 low = 0, high = arg1;
3845 break;
3846 case GE_EXPR: /* + [c, -] */
3847 in_p = ! in_p, low = arg1, high = 0;
3848 break;
3849 case LT_EXPR: /* - [c, -] */
3850 low = arg1, high = 0;
3851 break;
3852 case LE_EXPR: /* + [-, c] */
3853 in_p = ! in_p, low = 0, high = arg1;
3854 break;
3855 default:
3856 gcc_unreachable ();
3859 /* If this is an unsigned comparison, we also know that EXP is
3860 greater than or equal to zero. We base the range tests we make
3861 on that fact, so we record it here so we can parse existing
3862 range tests. We test arg0_type since often the return type
3863 of, e.g. EQ_EXPR, is boolean. */
3864 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3866 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3867 in_p, low, high, 1,
3868 build_int_cst (arg0_type, 0),
3869 NULL_TREE))
3870 return NULL_TREE;
3872 in_p = n_in_p, low = n_low, high = n_high;
3874 /* If the high bound is missing, but we have a nonzero low
3875 bound, reverse the range so it goes from zero to the low bound
3876 minus 1. */
3877 if (high == 0 && low && ! integer_zerop (low))
3879 in_p = ! in_p;
3880 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3881 integer_one_node, 0);
3882 low = build_int_cst (arg0_type, 0);
3886 *p_low = low;
3887 *p_high = high;
3888 *p_in_p = in_p;
3889 return arg0;
3891 case NEGATE_EXPR:
3892 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3893 low and high are non-NULL, then normalize will DTRT. */
3894 if (!TYPE_UNSIGNED (arg0_type)
3895 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3897 if (low == NULL_TREE)
3898 low = TYPE_MIN_VALUE (arg0_type);
3899 if (high == NULL_TREE)
3900 high = TYPE_MAX_VALUE (arg0_type);
3903 /* (-x) IN [a,b] -> x in [-b, -a] */
3904 n_low = range_binop (MINUS_EXPR, exp_type,
3905 build_int_cst (exp_type, 0),
3906 0, high, 1);
3907 n_high = range_binop (MINUS_EXPR, exp_type,
3908 build_int_cst (exp_type, 0),
3909 0, low, 0);
3910 if (n_high != 0 && TREE_OVERFLOW (n_high))
3911 return NULL_TREE;
3912 goto normalize;
3914 case BIT_NOT_EXPR:
3915 /* ~ X -> -X - 1 */
3916 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3917 build_int_cst (exp_type, 1));
3919 case PLUS_EXPR:
3920 case MINUS_EXPR:
3921 if (TREE_CODE (arg1) != INTEGER_CST)
3922 return NULL_TREE;
3924 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3925 move a constant to the other side. */
3926 if (!TYPE_UNSIGNED (arg0_type)
3927 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3928 return NULL_TREE;
3930 /* If EXP is signed, any overflow in the computation is undefined,
3931 so we don't worry about it so long as our computations on
3932 the bounds don't overflow. For unsigned, overflow is defined
3933 and this is exactly the right thing. */
3934 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3935 arg0_type, low, 0, arg1, 0);
3936 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3937 arg0_type, high, 1, arg1, 0);
3938 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3939 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3940 return NULL_TREE;
3942 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3943 *strict_overflow_p = true;
3945 normalize:
3946 /* Check for an unsigned range which has wrapped around the maximum
3947 value thus making n_high < n_low, and normalize it. */
3948 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3950 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3951 integer_one_node, 0);
3952 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3953 integer_one_node, 0);
3955 /* If the range is of the form +/- [ x+1, x ], we won't
3956 be able to normalize it. But then, it represents the
3957 whole range or the empty set, so make it
3958 +/- [ -, - ]. */
3959 if (tree_int_cst_equal (n_low, low)
3960 && tree_int_cst_equal (n_high, high))
3961 low = high = 0;
3962 else
3963 in_p = ! in_p;
3965 else
3966 low = n_low, high = n_high;
3968 *p_low = low;
3969 *p_high = high;
3970 *p_in_p = in_p;
3971 return arg0;
3973 CASE_CONVERT:
3974 case NON_LVALUE_EXPR:
3975 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3976 return NULL_TREE;
3978 if (! INTEGRAL_TYPE_P (arg0_type)
3979 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3980 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3981 return NULL_TREE;
3983 n_low = low, n_high = high;
3985 if (n_low != 0)
3986 n_low = fold_convert_loc (loc, arg0_type, n_low);
3988 if (n_high != 0)
3989 n_high = fold_convert_loc (loc, arg0_type, n_high);
3991 /* If we're converting arg0 from an unsigned type to the signed
3992 type of exp, we will be doing the comparison as unsigned.
3993 The tests above have already verified that LOW and HIGH
3994 are both positive.
3996 So we have to ensure that we will handle large unsigned
3997 values the same way that the current signed bounds treat
3998 negative values. */
4000 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4002 tree high_positive;
4003 tree equiv_type;
4004 /* For fixed-point modes, we need to pass the saturating flag
4005 as the 2nd parameter. */
4006 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4007 equiv_type
4008 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4009 TYPE_SATURATING (arg0_type));
4010 else
4011 equiv_type
4012 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4014 /* A range without an upper bound is, naturally, unbounded.
4015 Since convert would have cropped a very large value, use
4016 the max value for the destination type. */
4017 high_positive
4018 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4019 : TYPE_MAX_VALUE (arg0_type);
4021 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4022 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4023 fold_convert_loc (loc, arg0_type,
4024 high_positive),
4025 build_int_cst (arg0_type, 1));
4027 /* If the low bound is specified, "and" the range with the
4028 range for which the original unsigned value will be
4029 positive. */
4030 if (low != 0)
4032 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4033 1, fold_convert_loc (loc, arg0_type,
4034 integer_zero_node),
4035 high_positive))
4036 return NULL_TREE;
4038 in_p = (n_in_p == in_p);
4040 else
4042 /* Otherwise, "or" the range with the range of the input
4043 that will be interpreted as negative. */
4044 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4045 1, fold_convert_loc (loc, arg0_type,
4046 integer_zero_node),
4047 high_positive))
4048 return NULL_TREE;
4050 in_p = (in_p != n_in_p);
4054 *p_low = n_low;
4055 *p_high = n_high;
4056 *p_in_p = in_p;
4057 return arg0;
4059 default:
4060 return NULL_TREE;
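/* A worked single step (illustrative only, assuming X is an 8-bit
   unsigned char): for the range + [0, 5] on X + 10, the PLUS_EXPR case
   subtracts 10 from both bounds in the operand's type, giving
   n_low = 0 - 10 = 246 and n_high = 5 - 10 = 251.  Since 246 <= 251 no
   wrap normalization is needed, and the step returns X with the range
   + [246, 251], exactly the X values for which X + 10 wraps into
   [0, 5].  */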
4064 /* Given EXP, a logical expression, set the range it is testing into
4065 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4066 actually being tested. *PLOW and *PHIGH will be made of the same
4067 type as the returned expression. If EXP is not a comparison, we
4068 will most likely not be returning a useful value and range. Set
4069 *STRICT_OVERFLOW_P to true if the return value is only valid
4070 because signed overflow is undefined; otherwise, do not change
4071 *STRICT_OVERFLOW_P. */
4073 tree
4074 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4075 bool *strict_overflow_p)
4077 enum tree_code code;
4078 tree arg0, arg1 = NULL_TREE;
4079 tree exp_type, nexp;
4080 int in_p;
4081 tree low, high;
4082 location_t loc = EXPR_LOCATION (exp);
4084 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4085 and see if we can refine the range. Some of the cases below may not
4086 happen, but it doesn't seem worth worrying about this. We keep
4087 iterating as long as make_range_step refines the range, and
4088 leave the loop once it returns NULL_TREE. */
4090 in_p = 0;
4091 low = high = build_int_cst (TREE_TYPE (exp), 0);
4093 while (1)
4095 code = TREE_CODE (exp);
4096 exp_type = TREE_TYPE (exp);
4097 arg0 = NULL_TREE;
4099 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4101 if (TREE_OPERAND_LENGTH (exp) > 0)
4102 arg0 = TREE_OPERAND (exp, 0);
4103 if (TREE_CODE_CLASS (code) == tcc_binary
4104 || TREE_CODE_CLASS (code) == tcc_comparison
4105 || (TREE_CODE_CLASS (code) == tcc_expression
4106 && TREE_OPERAND_LENGTH (exp) > 1))
4107 arg1 = TREE_OPERAND (exp, 1);
4109 if (arg0 == NULL_TREE)
4110 break;
4112 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4113 &high, &in_p, strict_overflow_p);
4114 if (nexp == NULL_TREE)
4115 break;
4116 exp = nexp;
4119 /* If EXP is a constant, we can evaluate whether this is true or false. */
4120 if (TREE_CODE (exp) == INTEGER_CST)
4122 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4123 exp, 0, low, 0))
4124 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4125 exp, 1, high, 1)));
4126 low = high = 0;
4127 exp = 0;
4130 *pin_p = in_p, *plow = low, *phigh = high;
4131 return exp;
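/* A full walkthrough (illustrative, not from the original source), for
   unsigned X and EXP = X - 2 <= 3:
     start:      EXP != 0, i.e. - [0, 0]
     LE_EXPR:    + [-, 3] on X - 2; since the type is unsigned this is
                 merged with + [0, -] to give + [0, 3]
     MINUS_EXPR: add 2 to both bounds, giving + [2, 5] on X
   so the returned expression is X, with *PIN_P == 1 and bounds
   [2, 5].  */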
4134 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4135 type, TYPE, return an expression to test if EXP is in (or out of, depending
4136 on IN_P) the range. Return 0 if the test couldn't be created. */
4138 tree
4139 build_range_check (location_t loc, tree type, tree exp, int in_p,
4140 tree low, tree high)
4142 tree etype = TREE_TYPE (exp), value;
4144 #ifdef HAVE_canonicalize_funcptr_for_compare
4145 /* Disable this optimization for function pointer expressions
4146 on targets that require function pointer canonicalization. */
4147 if (HAVE_canonicalize_funcptr_for_compare
4148 && TREE_CODE (etype) == POINTER_TYPE
4149 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4150 return NULL_TREE;
4151 #endif
4153 if (! in_p)
4155 value = build_range_check (loc, type, exp, 1, low, high);
4156 if (value != 0)
4157 return invert_truthvalue_loc (loc, value);
4159 return 0;
4162 if (low == 0 && high == 0)
4163 return build_int_cst (type, 1);
4165 if (low == 0)
4166 return fold_build2_loc (loc, LE_EXPR, type, exp,
4167 fold_convert_loc (loc, etype, high));
4169 if (high == 0)
4170 return fold_build2_loc (loc, GE_EXPR, type, exp,
4171 fold_convert_loc (loc, etype, low));
4173 if (operand_equal_p (low, high, 0))
4174 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4175 fold_convert_loc (loc, etype, low));
4177 if (integer_zerop (low))
4179 if (! TYPE_UNSIGNED (etype))
4181 etype = unsigned_type_for (etype);
4182 high = fold_convert_loc (loc, etype, high);
4183 exp = fold_convert_loc (loc, etype, exp);
4185 return build_range_check (loc, type, exp, 1, 0, high);
4188 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4189 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4191 unsigned HOST_WIDE_INT lo;
4192 HOST_WIDE_INT hi;
4193 int prec;
4195 prec = TYPE_PRECISION (etype);
4196 if (prec <= HOST_BITS_PER_WIDE_INT)
4198 hi = 0;
4199 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4201 else
4203 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4204 lo = (unsigned HOST_WIDE_INT) -1;
4207 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4209 if (TYPE_UNSIGNED (etype))
4211 tree signed_etype = signed_type_for (etype);
4212 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4213 etype
4214 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4215 else
4216 etype = signed_etype;
4217 exp = fold_convert_loc (loc, etype, exp);
4219 return fold_build2_loc (loc, GT_EXPR, type, exp,
4220 build_int_cst (etype, 0));
4224 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4225 This requires wrap-around arithmetic for the type of the expression.
4226 First make sure that arithmetic in this type is valid, then make sure
4227 that it wraps around. */
4228 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4229 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4230 TYPE_UNSIGNED (etype));
4232 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4234 tree utype, minv, maxv;
4236 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4237 for the type in question, as we rely on this here. */
4238 utype = unsigned_type_for (etype);
4239 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4240 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4241 integer_one_node, 1);
4242 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4244 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4245 minv, 1, maxv, 1)))
4246 etype = utype;
4247 else
4248 return 0;
4251 high = fold_convert_loc (loc, etype, high);
4252 low = fold_convert_loc (loc, etype, low);
4253 exp = fold_convert_loc (loc, etype, exp);
4255 value = const_binop (MINUS_EXPR, high, low);
4258 if (POINTER_TYPE_P (etype))
4260 if (value != 0 && !TREE_OVERFLOW (value))
4262 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4263 return build_range_check (loc, type,
4264 fold_build_pointer_plus_loc (loc, exp, low),
4265 1, build_int_cst (etype, 0), value);
4267 return 0;
4270 if (value != 0 && !TREE_OVERFLOW (value))
4271 return build_range_check (loc, type,
4272 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4273 1, build_int_cst (etype, 0), value);
4275 return 0;
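/* An illustrative trace (not from the original source): for unsigned X,
   build_range_check (loc, type, X, 1, 2, 5) finds both bounds nonzero
   and distinct, computes value = 5 - 2 = 3, and recurses on X - 2 with
   bounds [0, 3]; the integer_zerop (low) case then yields
   (unsigned) (X - 2) <= 3.  */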
4278 /* Return the predecessor of VAL in its type, handling the infinite case. */
4280 static tree
4281 range_predecessor (tree val)
4283 tree type = TREE_TYPE (val);
4285 if (INTEGRAL_TYPE_P (type)
4286 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4287 return 0;
4288 else
4289 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4292 /* Return the successor of VAL in its type, handling the infinite case. */
4294 static tree
4295 range_successor (tree val)
4297 tree type = TREE_TYPE (val);
4299 if (INTEGRAL_TYPE_P (type)
4300 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4301 return 0;
4302 else
4303 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4306 /* Given two ranges, see if we can merge them into one. Return 1 if we
4307 can, 0 if we can't. Set the output range into the specified parameters. */
4309 bool
4310 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4311 tree high0, int in1_p, tree low1, tree high1)
4313 int no_overlap;
4314 int subset;
4315 int temp;
4316 tree tem;
4317 int in_p;
4318 tree low, high;
4319 int lowequal = ((low0 == 0 && low1 == 0)
4320 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4321 low0, 0, low1, 0)));
4322 int highequal = ((high0 == 0 && high1 == 0)
4323 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4324 high0, 1, high1, 1)));
4326 /* Make range 0 be the range that starts first, or ends last if they
4327 start at the same value. Swap them if that is not already the case. */
4328 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4329 low0, 0, low1, 0))
4330 || (lowequal
4331 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4332 high1, 1, high0, 1))))
4334 temp = in0_p, in0_p = in1_p, in1_p = temp;
4335 tem = low0, low0 = low1, low1 = tem;
4336 tem = high0, high0 = high1, high1 = tem;
4339 /* Now flag two cases, whether the ranges are disjoint or whether the
4340 second range is totally subsumed in the first. Note that the tests
4341 below are simplified by the ones above. */
4342 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4343 high0, 1, low1, 0));
4344 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4345 high1, 1, high0, 1));
4347 /* We now have four cases, depending on whether we are including or
4348 excluding the two ranges. */
4349 if (in0_p && in1_p)
4351 /* If they don't overlap, the result is false. If the second range
4352 is a subset it is the result. Otherwise, the range is from the start
4353 of the second to the end of the first. */
4354 if (no_overlap)
4355 in_p = 0, low = high = 0;
4356 else if (subset)
4357 in_p = 1, low = low1, high = high1;
4358 else
4359 in_p = 1, low = low1, high = high0;
4362 else if (in0_p && ! in1_p)
4364 /* If they don't overlap, the result is the first range. If they are
4365 equal, the result is false. If the second range is a subset of the
4366 first, and the ranges begin at the same place, we go from just after
4367 the end of the second range to the end of the first. If the second
4368 range is not a subset of the first, or if it is a subset and both
4369 ranges end at the same place, the range starts at the start of the
4370 first range and ends just before the second range.
4371 Otherwise, we can't describe this as a single range. */
4372 if (no_overlap)
4373 in_p = 1, low = low0, high = high0;
4374 else if (lowequal && highequal)
4375 in_p = 0, low = high = 0;
4376 else if (subset && lowequal)
4378 low = range_successor (high1);
4379 high = high0;
4380 in_p = 1;
4381 if (low == 0)
4383 /* We are in the weird situation where high0 > high1 but
4384 high1 has no successor. Punt. */
4385 return 0;
4388 else if (! subset || highequal)
4390 low = low0;
4391 high = range_predecessor (low1);
4392 in_p = 1;
4393 if (high == 0)
4395 /* low0 < low1 but low1 has no predecessor. Punt. */
4396 return 0;
4399 else
4400 return 0;
4403 else if (! in0_p && in1_p)
4405 /* If they don't overlap, the result is the second range. If the second
4406 is a subset of the first, the result is false. Otherwise,
4407 the range starts just after the first range and ends at the
4408 end of the second. */
4409 if (no_overlap)
4410 in_p = 1, low = low1, high = high1;
4411 else if (subset || highequal)
4412 in_p = 0, low = high = 0;
4413 else
4415 low = range_successor (high0);
4416 high = high1;
4417 in_p = 1;
4418 if (low == 0)
4420 /* high1 > high0 but high0 has no successor. Punt. */
4421 return 0;
4426 else
4428 /* The case where we are excluding both ranges. Here the complex case
4429 is if they don't overlap. In that case, the only time we have a
4430 range is if they are adjacent. If the second is a subset of the
4431 first, the result is the first. Otherwise, the range to exclude
4432 starts at the beginning of the first range and ends at the end of the
4433 second. */
4434 if (no_overlap)
4436 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4437 range_successor (high0),
4438 1, low1, 0)))
4439 in_p = 0, low = low0, high = high1;
4440 else
4442 /* Canonicalize - [min, x] into - [-, x]. */
4443 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4444 switch (TREE_CODE (TREE_TYPE (low0)))
4446 case ENUMERAL_TYPE:
4447 if (TYPE_PRECISION (TREE_TYPE (low0))
4448 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4449 break;
4450 /* FALLTHROUGH */
4451 case INTEGER_TYPE:
4452 if (tree_int_cst_equal (low0,
4453 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4454 low0 = 0;
4455 break;
4456 case POINTER_TYPE:
4457 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4458 && integer_zerop (low0))
4459 low0 = 0;
4460 break;
4461 default:
4462 break;
4465 /* Canonicalize - [x, max] into - [x, -]. */
4466 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4467 switch (TREE_CODE (TREE_TYPE (high1)))
4469 case ENUMERAL_TYPE:
4470 if (TYPE_PRECISION (TREE_TYPE (high1))
4471 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4472 break;
4473 /* FALLTHROUGH */
4474 case INTEGER_TYPE:
4475 if (tree_int_cst_equal (high1,
4476 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4477 high1 = 0;
4478 break;
4479 case POINTER_TYPE:
4480 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4481 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4482 high1, 1,
4483 integer_one_node, 1)))
4484 high1 = 0;
4485 break;
4486 default:
4487 break;
4490 /* The ranges might be also adjacent between the maximum and
4491 minimum values of the given type. For
4492 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4493 return + [x + 1, y - 1]. */
4494 if (low0 == 0 && high1 == 0)
4496 low = range_successor (high0);
4497 high = range_predecessor (low1);
4498 if (low == 0 || high == 0)
4499 return 0;
4501 in_p = 1;
4503 else
4504 return 0;
4507 else if (subset)
4508 in_p = 0, low = low0, high = high0;
4509 else
4510 in_p = 0, low = low0, high = high1;
4513 *pin_p = in_p, *plow = low, *phigh = high;
4514 return 1;
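/* A worked example (illustrative only): merging + [2, 5] with
   + [4, 10] for an AND (in0_p == in1_p == 1): range 0 starts first,
   the ranges overlap (5 >= 4), and range 1 is not a subset (10 > 5),
   so the result runs from the start of the second range to the end of
   the first, + [4, 5].  */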
4518 /* Subroutine of fold, looking inside expressions of the form
4519 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4520 of the COND_EXPR. This function is being used also to optimize
4521 A op B ? C : A, by reversing the comparison first.
4523 Return a folded expression whose code is not a COND_EXPR
4524 anymore, or NULL_TREE if no folding opportunity is found. */
4526 static tree
4527 fold_cond_expr_with_comparison (location_t loc, tree type,
4528 tree arg0, tree arg1, tree arg2)
4530 enum tree_code comp_code = TREE_CODE (arg0);
4531 tree arg00 = TREE_OPERAND (arg0, 0);
4532 tree arg01 = TREE_OPERAND (arg0, 1);
4533 tree arg1_type = TREE_TYPE (arg1);
4534 tree tem;
4536 STRIP_NOPS (arg1);
4537 STRIP_NOPS (arg2);
4539 /* If we have A op 0 ? A : -A, consider applying the following
4540 transformations:
4542 A == 0? A : -A same as -A
4543 A != 0? A : -A same as A
4544 A >= 0? A : -A same as abs (A)
4545 A > 0? A : -A same as abs (A)
4546 A <= 0? A : -A same as -abs (A)
4547 A < 0? A : -A same as -abs (A)
4549 None of these transformations work for modes with signed
4550 zeros. If A is +/-0, the first two transformations will
4551 change the sign of the result (from +0 to -0, or vice
4552 versa). The last four will fix the sign of the result,
4553 even though the original expressions could be positive or
4554 negative, depending on the sign of A.
4556 Note that all these transformations are correct if A is
4557 NaN, since the two alternatives (A and -A) are also NaNs. */
4558 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4559 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4560 ? real_zerop (arg01)
4561 : integer_zerop (arg01))
4562 && ((TREE_CODE (arg2) == NEGATE_EXPR
4563 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4564 /* In the case that A is of the form X-Y, '-A' (arg2) may
4565 have already been folded to Y-X, check for that. */
4566 || (TREE_CODE (arg1) == MINUS_EXPR
4567 && TREE_CODE (arg2) == MINUS_EXPR
4568 && operand_equal_p (TREE_OPERAND (arg1, 0),
4569 TREE_OPERAND (arg2, 1), 0)
4570 && operand_equal_p (TREE_OPERAND (arg1, 1),
4571 TREE_OPERAND (arg2, 0), 0))))
4572 switch (comp_code)
4574 case EQ_EXPR:
4575 case UNEQ_EXPR:
4576 tem = fold_convert_loc (loc, arg1_type, arg1);
4577 return pedantic_non_lvalue_loc (loc,
4578 fold_convert_loc (loc, type,
4579 negate_expr (tem)));
4580 case NE_EXPR:
4581 case LTGT_EXPR:
4582 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4583 case UNGE_EXPR:
4584 case UNGT_EXPR:
4585 if (flag_trapping_math)
4586 break;
4587 /* Fall through. */
4588 case GE_EXPR:
4589 case GT_EXPR:
4590 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4591 arg1 = fold_convert_loc (loc, signed_type_for
4592 (TREE_TYPE (arg1)), arg1);
4593 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4594 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4595 case UNLE_EXPR:
4596 case UNLT_EXPR:
4597 if (flag_trapping_math)
4598 break;
/* Fall through. */
4599 case LE_EXPR:
4600 case LT_EXPR:
4601 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4602 arg1 = fold_convert_loc (loc, signed_type_for
4603 (TREE_TYPE (arg1)), arg1);
4604 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4605 return negate_expr (fold_convert_loc (loc, type, tem));
4606 default:
4607 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4608 break;
4611 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4612 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4613 both transformations are correct when A is NaN: A != 0
4614 is then true, and A == 0 is false. */
4616 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4617 && integer_zerop (arg01) && integer_zerop (arg2))
4619 if (comp_code == NE_EXPR)
4620 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4621 else if (comp_code == EQ_EXPR)
4622 return build_int_cst (type, 0);
4625 /* Try some transformations of A op B ? A : B.
4627 A == B? A : B same as B
4628 A != B? A : B same as A
4629 A >= B? A : B same as max (A, B)
4630 A > B? A : B same as max (B, A)
4631 A <= B? A : B same as min (A, B)
4632 A < B? A : B same as min (B, A)
4634 As above, these transformations don't work in the presence
4635 of signed zeros. For example, if A and B are zeros of
4636 opposite sign, the first two transformations will change
4637 the sign of the result. In the last four, the original
4638 expressions give different results for (A=+0, B=-0) and
4639 (A=-0, B=+0), but the transformed expressions do not.
4641 The first two transformations are correct if either A or B
4642 is a NaN. In the first transformation, the condition will
4643 be false, and B will indeed be chosen. In the case of the
4644 second transformation, the condition A != B will be true,
4645 and A will be chosen.
4647 The conversions to max() and min() are not correct if B is
4648 a number and A is not. The conditions in the original
4649 expressions will be false, so all four give B. The min()
4650 and max() versions would give a NaN instead. */
4651 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4652 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4653 /* Avoid these transformations if the COND_EXPR may be used
4654 as an lvalue in the C++ front-end. PR c++/19199. */
4655 && (in_gimple_form
4656 || (strcmp (lang_hooks.name, "GNU C++") != 0
4657 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4658 || ! maybe_lvalue_p (arg1)
4659 || ! maybe_lvalue_p (arg2)))
4661 tree comp_op0 = arg00;
4662 tree comp_op1 = arg01;
4663 tree comp_type = TREE_TYPE (comp_op0);
4665 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4666 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4668 comp_type = type;
4669 comp_op0 = arg1;
4670 comp_op1 = arg2;
4673 switch (comp_code)
4675 case EQ_EXPR:
4676 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4677 case NE_EXPR:
4678 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4679 case LE_EXPR:
4680 case LT_EXPR:
4681 case UNLE_EXPR:
4682 case UNLT_EXPR:
4683 /* In C++ a ?: expression can be an lvalue, so put the
4684 operand which will be used if they are equal first
4685 so that we can convert this back to the
4686 corresponding COND_EXPR. */
4687 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4689 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4690 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4691 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4692 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4693 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4694 comp_op1, comp_op0);
4695 return pedantic_non_lvalue_loc (loc,
4696 fold_convert_loc (loc, type, tem));
4698 break;
4699 case GE_EXPR:
4700 case GT_EXPR:
4701 case UNGE_EXPR:
4702 case UNGT_EXPR:
4703 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4705 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4706 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4707 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4708 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4709 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4710 comp_op1, comp_op0);
4711 return pedantic_non_lvalue_loc (loc,
4712 fold_convert_loc (loc, type, tem));
4714 break;
4715 case UNEQ_EXPR:
4716 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4717 return pedantic_non_lvalue_loc (loc,
4718 fold_convert_loc (loc, type, arg2));
4719 break;
4720 case LTGT_EXPR:
4721 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4722 return pedantic_non_lvalue_loc (loc,
4723 fold_convert_loc (loc, type, arg1));
4724 break;
4725 default:
4726 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4727 break;
4731 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4732 we might still be able to simplify this. For example,
4733 if C1 is one less or one more than C2, this might have started
4734 out as a MIN or MAX and been transformed by this function.
4735 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4737 if (INTEGRAL_TYPE_P (type)
4738 && TREE_CODE (arg01) == INTEGER_CST
4739 && TREE_CODE (arg2) == INTEGER_CST)
4740 switch (comp_code)
4742 case EQ_EXPR:
4743 if (TREE_CODE (arg1) == INTEGER_CST)
4744 break;
4745 /* We can replace A with C1 in this case. */
4746 arg1 = fold_convert_loc (loc, type, arg01);
4747 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4749 case LT_EXPR:
4750 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4751 MIN_EXPR, to preserve the signedness of the comparison. */
4752 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4753 OEP_ONLY_CONST)
4754 && operand_equal_p (arg01,
4755 const_binop (PLUS_EXPR, arg2,
4756 build_int_cst (type, 1)),
4757 OEP_ONLY_CONST))
4759 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4760 fold_convert_loc (loc, TREE_TYPE (arg00),
4761 arg2));
4762 return pedantic_non_lvalue_loc (loc,
4763 fold_convert_loc (loc, type, tem));
4765 break;
4767 case LE_EXPR:
4768 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4769 as above. */
4770 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4771 OEP_ONLY_CONST)
4772 && operand_equal_p (arg01,
4773 const_binop (MINUS_EXPR, arg2,
4774 build_int_cst (type, 1)),
4775 OEP_ONLY_CONST))
4777 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4778 fold_convert_loc (loc, TREE_TYPE (arg00),
4779 arg2));
4780 return pedantic_non_lvalue_loc (loc,
4781 fold_convert_loc (loc, type, tem));
4783 break;
4785 case GT_EXPR:
4786 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4787 MAX_EXPR, to preserve the signedness of the comparison. */
4788 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4789 OEP_ONLY_CONST)
4790 && operand_equal_p (arg01,
4791 const_binop (MINUS_EXPR, arg2,
4792 build_int_cst (type, 1)),
4793 OEP_ONLY_CONST))
4795 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4796 fold_convert_loc (loc, TREE_TYPE (arg00),
4797 arg2));
4798 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4800 break;
4802 case GE_EXPR:
4803 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4804 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4805 OEP_ONLY_CONST)
4806 && operand_equal_p (arg01,
4807 const_binop (PLUS_EXPR, arg2,
4808 build_int_cst (type, 1)),
4809 OEP_ONLY_CONST))
4811 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4812 fold_convert_loc (loc, TREE_TYPE (arg00),
4813 arg2));
4814 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4816 break;
4817 case NE_EXPR:
4818 break;
4819 default:
4820 gcc_unreachable ();
4823 return NULL_TREE;
4828 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4829 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4830 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4831 false) >= 2)
4832 #endif
4834 /* EXP is some logical combination of boolean tests. See if we can
4835 merge it into some range test. Return the new tree if so. */
4837 static tree
4838 fold_range_test (location_t loc, enum tree_code code, tree type,
4839 tree op0, tree op1)
4841 int or_op = (code == TRUTH_ORIF_EXPR
4842 || code == TRUTH_OR_EXPR);
4843 int in0_p, in1_p, in_p;
4844 tree low0, low1, low, high0, high1, high;
4845 bool strict_overflow_p = false;
4846 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4847 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4848 tree tem;
4849 const char * const warnmsg = G_("assuming signed overflow does not occur "
4850 "when simplifying range test");
4852 /* If this is an OR operation, invert both sides; we will invert
4853 again at the end. */
4854 if (or_op)
4855 in0_p = ! in0_p, in1_p = ! in1_p;
4857 /* If both expressions are the same, if we can merge the ranges, and we
4858 can build the range test, return it or it inverted. If one of the
4859 ranges is always true or always false, consider it to be the same
4860 expression as the other. */
4861 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4862 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4863 in1_p, low1, high1)
4864 && 0 != (tem = (build_range_check (loc, type,
4865 lhs != 0 ? lhs
4866 : rhs != 0 ? rhs : integer_zero_node,
4867 in_p, low, high))))
4869 if (strict_overflow_p)
4870 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4871 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4874 /* On machines where branches are expensive, if this is a
4875 short-circuited branch and the underlying object on both sides
4876 is the same, make a non-short-circuit operation. */
4877 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4878 && lhs != 0 && rhs != 0
4879 && (code == TRUTH_ANDIF_EXPR
4880 || code == TRUTH_ORIF_EXPR)
4881 && operand_equal_p (lhs, rhs, 0))
4883 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4884 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4885 which cases we can't do this. */
4886 if (simple_operand_p (lhs))
4887 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4888 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4889 type, op0, op1);
4891 else if (!lang_hooks.decls.global_bindings_p ()
4892 && !CONTAINS_PLACEHOLDER_P (lhs))
4894 tree common = save_expr (lhs);
4896 if (0 != (lhs = build_range_check (loc, type, common,
4897 or_op ? ! in0_p : in0_p,
4898 low0, high0))
4899 && (0 != (rhs = build_range_check (loc, type, common,
4900 or_op ? ! in1_p : in1_p,
4901 low1, high1))))
4903 if (strict_overflow_p)
4904 fold_overflow_warning (warnmsg,
4905 WARN_STRICT_OVERFLOW_COMPARISON);
4906 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4907 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4908 type, lhs, rhs);
4913 return 0;
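/* An illustrative trace (not from the original source), for
   X == 2 || X == 3: make_range gives + [2, 2] and + [3, 3], the OR
   inverts both to - [2, 2] and - [3, 3], merge_ranges recognizes the
   adjacent excluded ranges as - [2, 3], and the final inversion plus
   build_range_check yield (unsigned) (X - 2) <= 1.  */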
4916 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
4917 P-bit value. Arrange things so the extra bits will be set to zero if and
4918 only if C is sign-extended to its full width. If MASK is nonzero,
4919 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4921 static tree
4922 unextend (tree c, int p, int unsignedp, tree mask)
4924 tree type = TREE_TYPE (c);
4925 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4926 tree temp;
4928 if (p == modesize || unsignedp)
4929 return c;
4931 /* We work by getting just the sign bit into the low-order bit, then
4932 into the high-order bit, then sign-extend. We then XOR that value
4933 with C. */
4934 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4935 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4937 /* We must use a signed type in order to get an arithmetic right shift.
4938 However, we must also avoid introducing accidental overflows, so that
4939 a subsequent call to integer_zerop will work. Hence we must
4940 do the type conversion here. At this point, the constant is either
4941 zero or one, and the conversion to a signed type can never overflow.
4942 We could get an overflow if this conversion is done anywhere else. */
4943 if (TYPE_UNSIGNED (type))
4944 temp = fold_convert (signed_type_for (type), temp);
4946 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4947 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4948 if (mask != 0)
4949 temp = const_binop (BIT_AND_EXPR, temp,
4950 fold_convert (TREE_TYPE (c), mask));
4951 /* If necessary, convert the type back to match the type of C. */
4952 if (TYPE_UNSIGNED (type))
4953 temp = fold_convert (type, temp);
4955 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
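/* A worked example of the sign-bit trick above (illustrative,
   assuming an 8-bit mode, P == 4 and MASK == 0): for C == 0xFC, i.e.
   the 4-bit value 1100 already sign-extended, TEMP becomes 0xF0 after
   the two shifts and C ^ TEMP == 0x0C, so the extra bits are zero.
   For C == 0x0C, the same TEMP gives 0x0C ^ 0xF0 == 0xFC, extra bits
   set, exactly as required.  */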
4958 /* For an expression that has the form
4959 (A && B) || ~B
4960 or
4961 (A || B) && ~B,
4962 we can drop one of the inner expressions and simplify to
4963 A || ~B
4964 or
4965 A && ~B
4966 LOC is the location of the resulting expression. OP is the inner
4967 logical operation; the left-hand side in the examples above, while CMPOP
4968 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4969 removing a condition that guards another, as in
4970 (A != NULL && A->...) || A == NULL
4971 which we must not transform. If RHS_ONLY is true, only eliminate the
4972 right-most operand of the inner logical operation. */
4974 static tree
4975 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4976 bool rhs_only)
4978 tree type = TREE_TYPE (cmpop);
4979 enum tree_code code = TREE_CODE (cmpop);
4980 enum tree_code truthop_code = TREE_CODE (op);
4981 tree lhs = TREE_OPERAND (op, 0);
4982 tree rhs = TREE_OPERAND (op, 1);
4983 tree orig_lhs = lhs, orig_rhs = rhs;
4984 enum tree_code rhs_code = TREE_CODE (rhs);
4985 enum tree_code lhs_code = TREE_CODE (lhs);
4986 enum tree_code inv_code;
4988 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4989 return NULL_TREE;
4991 if (TREE_CODE_CLASS (code) != tcc_comparison)
4992 return NULL_TREE;
4994 if (rhs_code == truthop_code)
4996 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4997 if (newrhs != NULL_TREE)
4999 rhs = newrhs;
5000 rhs_code = TREE_CODE (rhs);
5003 if (lhs_code == truthop_code && !rhs_only)
5005 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5006 if (newlhs != NULL_TREE)
5008 lhs = newlhs;
5009 lhs_code = TREE_CODE (lhs);
5013 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5014 if (inv_code == rhs_code
5015 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5016 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5017 return lhs;
5018 if (!rhs_only && inv_code == lhs_code
5019 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5020 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5021 return rhs;
5022 if (rhs != orig_rhs || lhs != orig_lhs)
5023 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5024 lhs, rhs);
5025 return NULL_TREE;
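/* An illustrative application (not from the original source), with
   RHS_ONLY false and integer a and b: for (a < b && c) || a >= b, OP
   is the TRUTH_AND_EXPR and CMPOP is a >= b.  The inverse of GE_EXPR
   is LT_EXPR, which matches the left arm with identical operands, so
   the function returns c and the whole expression simplifies to
   c || a >= b.  */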
5028 /* Find ways of folding logical expressions of LHS and RHS:
5029 Try to merge two comparisons to the same innermost item.
5030 Look for range tests like "ch >= '0' && ch <= '9'".
5031 Look for combinations of simple terms on machines with expensive branches
5032 and evaluate the RHS unconditionally.
5034 For example, if we have p->a == 2 && p->b == 4 and we can make an
5035 object large enough to span both A and B, we can do this with a comparison
5036 against the object ANDed with a mask.
5038 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5039 operations to do this with one comparison.
5041 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5042 function and the one above.
5044 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5045 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5047 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5048 two operands.
5050 We return the simplified tree or 0 if no optimization is possible. */
5052 static tree
5053 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5054 tree lhs, tree rhs)
5056 /* If this is the "or" of two comparisons, we can do something if
5057 the comparisons are NE_EXPR. If this is the "and", we can do something
5058 if the comparisons are EQ_EXPR. I.e.,
5059 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5061 WANTED_CODE is this operation code. For single bit fields, we can
5062 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5063 comparison for one-bit fields. */
5065 enum tree_code wanted_code;
5066 enum tree_code lcode, rcode;
5067 tree ll_arg, lr_arg, rl_arg, rr_arg;
5068 tree ll_inner, lr_inner, rl_inner, rr_inner;
5069 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5070 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5071 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5072 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5073 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5074 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5075 enum machine_mode lnmode, rnmode;
5076 tree ll_mask, lr_mask, rl_mask, rr_mask;
5077 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5078 tree l_const, r_const;
5079 tree lntype, rntype, result;
5080 HOST_WIDE_INT first_bit, end_bit;
5081 int volatilep;
5083 /* Start by getting the comparison codes. Fail if anything is volatile.
5084 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5085 it were surrounded with a NE_EXPR. */
5087 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5088 return 0;
5090 lcode = TREE_CODE (lhs);
5091 rcode = TREE_CODE (rhs);
5093 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5095 lhs = build2 (NE_EXPR, truth_type, lhs,
5096 build_int_cst (TREE_TYPE (lhs), 0));
5097 lcode = NE_EXPR;
5100 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5102 rhs = build2 (NE_EXPR, truth_type, rhs,
5103 build_int_cst (TREE_TYPE (rhs), 0));
5104 rcode = NE_EXPR;
5107 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5108 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5109 return 0;
5111 ll_arg = TREE_OPERAND (lhs, 0);
5112 lr_arg = TREE_OPERAND (lhs, 1);
5113 rl_arg = TREE_OPERAND (rhs, 0);
5114 rr_arg = TREE_OPERAND (rhs, 1);
5116 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5117 if (simple_operand_p (ll_arg)
5118 && simple_operand_p (lr_arg))
5120 if (operand_equal_p (ll_arg, rl_arg, 0)
5121 && operand_equal_p (lr_arg, rr_arg, 0))
5123 result = combine_comparisons (loc, code, lcode, rcode,
5124 truth_type, ll_arg, lr_arg);
5125 if (result)
5126 return result;
5128 else if (operand_equal_p (ll_arg, rr_arg, 0)
5129 && operand_equal_p (lr_arg, rl_arg, 0))
5131 result = combine_comparisons (loc, code, lcode,
5132 swap_tree_comparison (rcode),
5133 truth_type, ll_arg, lr_arg);
5134 if (result)
5135 return result;
5139 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5140 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5142 /* If the RHS can be evaluated unconditionally and its operands are
5143 simple, it wins to evaluate the RHS unconditionally on machines
5144 with expensive branches. In this case, this isn't a comparison
5145 that can be merged. */
5147 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5148 false) >= 2
5149 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5150 && simple_operand_p (rl_arg)
5151 && simple_operand_p (rr_arg))
5153 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5154 if (code == TRUTH_OR_EXPR
5155 && lcode == NE_EXPR && integer_zerop (lr_arg)
5156 && rcode == NE_EXPR && integer_zerop (rr_arg)
5157 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5158 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5159 return build2_loc (loc, NE_EXPR, truth_type,
5160 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5161 ll_arg, rl_arg),
5162 build_int_cst (TREE_TYPE (ll_arg), 0));
5164 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5165 if (code == TRUTH_AND_EXPR
5166 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5167 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5168 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5169 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5170 return build2_loc (loc, EQ_EXPR, truth_type,
5171 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5172 ll_arg, rl_arg),
5173 build_int_cst (TREE_TYPE (ll_arg), 0));
5176 /* See if the comparisons can be merged. Then get all the parameters for
5177 each side. */
5179 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5180 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5181 return 0;
5183 volatilep = 0;
5184 ll_inner = decode_field_reference (loc, ll_arg,
5185 &ll_bitsize, &ll_bitpos, &ll_mode,
5186 &ll_unsignedp, &volatilep, &ll_mask,
5187 &ll_and_mask);
5188 lr_inner = decode_field_reference (loc, lr_arg,
5189 &lr_bitsize, &lr_bitpos, &lr_mode,
5190 &lr_unsignedp, &volatilep, &lr_mask,
5191 &lr_and_mask);
5192 rl_inner = decode_field_reference (loc, rl_arg,
5193 &rl_bitsize, &rl_bitpos, &rl_mode,
5194 &rl_unsignedp, &volatilep, &rl_mask,
5195 &rl_and_mask);
5196 rr_inner = decode_field_reference (loc, rr_arg,
5197 &rr_bitsize, &rr_bitpos, &rr_mode,
5198 &rr_unsignedp, &volatilep, &rr_mask,
5199 &rr_and_mask);
5201 /* The inner operation on the lhs of each comparison must be the
5202 same if we are to be able to do anything.
5203 Then see if we have constants. If not, the same must be true for
5204 the rhs's. */
5205 if (volatilep || ll_inner == 0 || rl_inner == 0
5206 || ! operand_equal_p (ll_inner, rl_inner, 0))
5207 return 0;
5209 if (TREE_CODE (lr_arg) == INTEGER_CST
5210 && TREE_CODE (rr_arg) == INTEGER_CST)
5211 l_const = lr_arg, r_const = rr_arg;
5212 else if (lr_inner == 0 || rr_inner == 0
5213 || ! operand_equal_p (lr_inner, rr_inner, 0))
5214 return 0;
5215 else
5216 l_const = r_const = 0;
5218 /* If either comparison code is not correct for our logical operation,
5219 fail. However, we can convert a one-bit comparison against zero into
5220 the opposite comparison against that bit being set in the field. */
5222 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5223 if (lcode != wanted_code)
5225 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5227 /* Make the left operand unsigned, since we are only interested
5228 in the value of one bit. Otherwise we are doing the wrong
5229 thing below. */
5230 ll_unsignedp = 1;
5231 l_const = ll_mask;
5233 else
5234 return 0;
5237 /* This is analogous to the code for l_const above. */
5238 if (rcode != wanted_code)
5240 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5242 rl_unsignedp = 1;
5243 r_const = rl_mask;
5245 else
5246 return 0;
5249 /* See if we can find a mode that contains both fields being compared on
5250 the left. If we can't, fail. Otherwise, update all constants and masks
5251 to be relative to a field of that size. */
5252 first_bit = MIN (ll_bitpos, rl_bitpos);
5253 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5254 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5255 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5256 volatilep);
5257 if (lnmode == VOIDmode)
5258 return 0;
5260 lnbitsize = GET_MODE_BITSIZE (lnmode);
5261 lnbitpos = first_bit & ~ (lnbitsize - 1);
5262 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5263 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5265 if (BYTES_BIG_ENDIAN)
5267 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5268 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5271 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5272 size_int (xll_bitpos));
5273 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5274 size_int (xrl_bitpos));
5276 if (l_const)
5278 l_const = fold_convert_loc (loc, lntype, l_const);
5279 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5280 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5281 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5282 fold_build1_loc (loc, BIT_NOT_EXPR,
5283 lntype, ll_mask))))
5285 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5287 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5290 if (r_const)
5292 r_const = fold_convert_loc (loc, lntype, r_const);
5293 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5294 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5295 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5296 fold_build1_loc (loc, BIT_NOT_EXPR,
5297 lntype, rl_mask))))
5299 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5301 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5305 /* If the right sides are not constant, do the same for them. Also,
5306 disallow this optimization if a size or signedness mismatch occurs
5307 between the left and right sides. */
5308 if (l_const == 0)
5310 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5311 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5312 /* Make sure the two fields on the right
5313 correspond to the left without being swapped. */
5314 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5315 return 0;
5317 first_bit = MIN (lr_bitpos, rr_bitpos);
5318 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5319 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5320 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5321 volatilep);
5322 if (rnmode == VOIDmode)
5323 return 0;
5325 rnbitsize = GET_MODE_BITSIZE (rnmode);
5326 rnbitpos = first_bit & ~ (rnbitsize - 1);
5327 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5328 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5330 if (BYTES_BIG_ENDIAN)
5332 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5333 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5336 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5337 rntype, lr_mask),
5338 size_int (xlr_bitpos));
5339 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5340 rntype, rr_mask),
5341 size_int (xrr_bitpos));
5343 /* Make a mask that corresponds to both fields being compared.
5344 Do this for both items being compared. If the operands are the
5345 same size and the bits being compared are in the same position
5346 then we can do this by masking both and comparing the masked
5347 results. */
5348 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5349 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5350 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5352 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5353 ll_unsignedp || rl_unsignedp);
5354 if (! all_ones_mask_p (ll_mask, lnbitsize))
5355 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5357 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5358 lr_unsignedp || rr_unsignedp);
5359 if (! all_ones_mask_p (lr_mask, rnbitsize))
5360 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5362 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5365 /* There is still another way we can do something: If both pairs of
5366 fields being compared are adjacent, we may be able to make a wider
5367 field containing them both.
5369 Note that we still must mask the lhs/rhs expressions. Furthermore,
5370 the mask must be shifted to account for the shift done by
5371 make_bit_field_ref. */
5372 if ((ll_bitsize + ll_bitpos == rl_bitpos
5373 && lr_bitsize + lr_bitpos == rr_bitpos)
5374 || (ll_bitpos == rl_bitpos + rl_bitsize
5375 && lr_bitpos == rr_bitpos + rr_bitsize))
5377 tree type;
5379 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5380 ll_bitsize + rl_bitsize,
5381 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5382 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5383 lr_bitsize + rr_bitsize,
5384 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5386 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5387 size_int (MIN (xll_bitpos, xrl_bitpos)));
5388 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5389 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5391 /* Convert to the smaller type before masking out unwanted bits. */
5392 type = lntype;
5393 if (lntype != rntype)
5395 if (lnbitsize > rnbitsize)
5397 lhs = fold_convert_loc (loc, rntype, lhs);
5398 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5399 type = rntype;
5401 else if (lnbitsize < rnbitsize)
5403 rhs = fold_convert_loc (loc, lntype, rhs);
5404 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5405 type = lntype;
5409 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5410 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5412 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5413 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5415 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5418 return 0;
5421 /* Handle the case of comparisons with constants. If there is something in
5422 common between the masks, those bits of the constants must be the same.
5423 If not, the condition is always false. Test for this to avoid generating
5424 incorrect code below. */
5425 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5426 if (! integer_zerop (result)
5427 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5428 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5430 if (wanted_code == NE_EXPR)
5432 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5433 return constant_boolean_node (true, truth_type);
5435 else
5437 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5438 return constant_boolean_node (false, truth_type);
5442 /* Construct the expression we will return. First get the component
5443 reference we will make. Unless the mask is all ones for the width
5444 of that field, perform the mask operation. Then compare with the
5445 merged constant. */
5446 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5447 ll_unsignedp || rl_unsignedp);
5449 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5450 if (! all_ones_mask_p (ll_mask, lnbitsize))
5451 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5453 return build2_loc (loc, wanted_code, truth_type, result,
5454 const_binop (BIT_IOR_EXPR, l_const, r_const));
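/* For example, given

     struct s { unsigned a : 4; unsigned b : 4; } x;

   the test `x.a == 3 && x.b == 5' can be merged by the code above into a
   single load, mask, and compare of the containing word, roughly
   `(w & 0xff) == 0x53' on a little-endian target (the exact mask and
   constant depend on the bit positions and on endianness).  */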
5457 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5458 constant. */
5460 static tree
5461 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5462 tree op0, tree op1)
5464 tree arg0 = op0;
5465 enum tree_code op_code;
5466 tree comp_const;
5467 tree minmax_const;
5468 int consts_equal, consts_lt;
5469 tree inner;
5471 STRIP_SIGN_NOPS (arg0);
5473 op_code = TREE_CODE (arg0);
5474 minmax_const = TREE_OPERAND (arg0, 1);
5475 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5476 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5477 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5478 inner = TREE_OPERAND (arg0, 0);
5480 /* If something does not permit us to optimize, give up by returning NULL_TREE. */
5481 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5482 || TREE_CODE (comp_const) != INTEGER_CST
5483 || TREE_OVERFLOW (comp_const)
5484 || TREE_CODE (minmax_const) != INTEGER_CST
5485 || TREE_OVERFLOW (minmax_const))
5486 return NULL_TREE;
5488 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5489 and GT_EXPR, doing the rest with recursive calls using logical
5490 simplifications. */
5491 switch (code)
5493 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5495 tree tem
5496 = optimize_minmax_comparison (loc,
5497 invert_tree_comparison (code, false),
5498 type, op0, op1);
5499 if (tem)
5500 return invert_truthvalue_loc (loc, tem);
5501 return NULL_TREE;
5504 case GE_EXPR:
5505 return
5506 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5507 optimize_minmax_comparison
5508 (loc, EQ_EXPR, type, arg0, comp_const),
5509 optimize_minmax_comparison
5510 (loc, GT_EXPR, type, arg0, comp_const));
5512 case EQ_EXPR:
5513 if (op_code == MAX_EXPR && consts_equal)
5514 /* MAX (X, 0) == 0 -> X <= 0 */
5515 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5517 else if (op_code == MAX_EXPR && consts_lt)
5518 /* MAX (X, 0) == 5 -> X == 5 */
5519 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5521 else if (op_code == MAX_EXPR)
5522 /* MAX (X, 0) == -1 -> false */
5523 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5525 else if (consts_equal)
5526 /* MIN (X, 0) == 0 -> X >= 0 */
5527 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5529 else if (consts_lt)
5530 /* MIN (X, 0) == 5 -> false */
5531 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5533 else
5534 /* MIN (X, 0) == -1 -> X == -1 */
5535 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5537 case GT_EXPR:
5538 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5539 /* MAX (X, 0) > 0 -> X > 0
5540 MAX (X, 0) > 5 -> X > 5 */
5541 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5543 else if (op_code == MAX_EXPR)
5544 /* MAX (X, 0) > -1 -> true */
5545 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5547 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5548 /* MIN (X, 0) > 0 -> false
5549 MIN (X, 0) > 5 -> false */
5550 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5552 else
5553 /* MIN (X, 0) > -1 -> X > -1 */
5554 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5556 default:
5557 return NULL_TREE;
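/* For example, `MAX (x, 4) > 4' is folded via the GT_EXPR case above to
   `x > 4', while `MIN (x, 4) > 9' is folded to constant false, since
   MIN (x, 4) can never exceed 4.  */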
5561 /* T is an integer expression that is being multiplied, divided, or taken a
5562 modulus (CODE says which and what kind of divide or modulus) by a
5563 constant C. See if we can eliminate that operation by folding it with
5564 other operations already in T. WIDE_TYPE, if non-null, is a type that
5565 should be used for the computation if wider than our type.
5567 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5568 (X * 2) + (Y * 4). We must, however, be assured that either the original
5569 expression would not overflow or that overflow is undefined for the type
5570 in the language in question.
5572 If we return a non-null expression, it is an equivalent form of the
5573 original computation, but need not be in the original type.
5575 We set *STRICT_OVERFLOW_P to true if the return value depends on
5576 signed overflow being undefined. Otherwise we do not change
5577 *STRICT_OVERFLOW_P. */
5579 static tree
5580 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5581 bool *strict_overflow_p)
5583 /* To avoid exponential search depth, refuse to allow recursion past
5584 three levels. Beyond that (1) it's highly unlikely that we'll find
5585 something interesting and (2) we've probably processed it before
5586 when we built the inner expression. */
5588 static int depth;
5589 tree ret;
5591 if (depth > 3)
5592 return NULL;
5594 depth++;
5595 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5596 depth--;
5598 return ret;
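/* A typical use, sketched: to fold (X * 6) / 2 in a signed type (where
   overflow is undefined), a caller can do

     bool sop = false;
     tree t1 = extract_muldiv (op0, c, TRUNC_DIV_EXPR, NULL_TREE, &sop);

   and, if T1 is non-null (here X * 3), substitute it for the original
   division, e.g. warning via fold_overflow_warning when SOP was set.  */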
5601 static tree
5602 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5603 bool *strict_overflow_p)
5605 tree type = TREE_TYPE (t);
5606 enum tree_code tcode = TREE_CODE (t);
5607 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5608 > GET_MODE_SIZE (TYPE_MODE (type)))
5609 ? wide_type : type);
5610 tree t1, t2;
5611 int same_p = tcode == code;
5612 tree op0 = NULL_TREE, op1 = NULL_TREE;
5613 bool sub_strict_overflow_p;
5615 /* Don't deal with constants of zero here; they confuse the code below. */
5616 if (integer_zerop (c))
5617 return NULL_TREE;
5619 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5620 op0 = TREE_OPERAND (t, 0);
5622 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5623 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5625 /* Note that we need not handle conditional operations here since fold
5626 already handles those cases. So just do arithmetic here. */
5627 switch (tcode)
5629 case INTEGER_CST:
5630 /* For a constant, we can always simplify if we are a multiply
5631 or (for divide and modulus) if it is a multiple of our constant. */
5632 if (code == MULT_EXPR
5633 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5634 return const_binop (code, fold_convert (ctype, t),
5635 fold_convert (ctype, c));
5636 break;
5638 CASE_CONVERT: case NON_LVALUE_EXPR:
5639 /* If op0 is an expression ... */
5640 if ((COMPARISON_CLASS_P (op0)
5641 || UNARY_CLASS_P (op0)
5642 || BINARY_CLASS_P (op0)
5643 || VL_EXP_CLASS_P (op0)
5644 || EXPRESSION_CLASS_P (op0))
5645 /* ... and has wrapping overflow, and its type is smaller
5646 than ctype, then we cannot pass through as widening. */
5647 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5648 && (TYPE_PRECISION (ctype)
5649 > TYPE_PRECISION (TREE_TYPE (op0))))
5650 /* ... or this is a truncation (t is narrower than op0),
5651 then we cannot pass through this narrowing. */
5652 || (TYPE_PRECISION (type)
5653 < TYPE_PRECISION (TREE_TYPE (op0)))
5654 /* ... or signedness changes for division or modulus,
5655 then we cannot pass through this conversion. */
5656 || (code != MULT_EXPR
5657 && (TYPE_UNSIGNED (ctype)
5658 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5659 /* ... or has undefined overflow while the type converted to
5660 has not, we cannot do the operation in the inner type
5661 as that would introduce undefined overflow. */
5662 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5663 && !TYPE_OVERFLOW_UNDEFINED (type))))
5664 break;
5666 /* Pass the constant down and see if we can make a simplification. If
5667 we can, replace this expression with the inner simplification for
5668 possible later conversion to our or some other type. */
5669 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5670 && TREE_CODE (t2) == INTEGER_CST
5671 && !TREE_OVERFLOW (t2)
5672 && (0 != (t1 = extract_muldiv (op0, t2, code,
5673 code == MULT_EXPR
5674 ? ctype : NULL_TREE,
5675 strict_overflow_p))))
5676 return t1;
5677 break;
5679 case ABS_EXPR:
5680 /* If widening the type changes it from signed to unsigned, then we
5681 must avoid building ABS_EXPR itself as unsigned. */
5682 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5684 tree cstype = (*signed_type_for) (ctype);
5685 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5686 != 0)
5688 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5689 return fold_convert (ctype, t1);
5691 break;
5693 /* If the constant is negative, we cannot simplify this. */
5694 if (tree_int_cst_sgn (c) == -1)
5695 break;
5696 /* FALLTHROUGH */
5697 case NEGATE_EXPR:
5698 /* For division and modulus, type can't be unsigned, as e.g.
5699 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5700 For signed types, even with wrapping overflow, this is fine. */
5701 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5702 break;
5703 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5704 != 0)
5705 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5706 break;
5708 case MIN_EXPR: case MAX_EXPR:
5709 /* If widening the type changes the signedness, then we can't perform
5710 this optimization as that changes the result. */
5711 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5712 break;
5714 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5715 sub_strict_overflow_p = false;
5716 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5717 &sub_strict_overflow_p)) != 0
5718 && (t2 = extract_muldiv (op1, c, code, wide_type,
5719 &sub_strict_overflow_p)) != 0)
5721 if (tree_int_cst_sgn (c) < 0)
5722 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5723 if (sub_strict_overflow_p)
5724 *strict_overflow_p = true;
5725 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5726 fold_convert (ctype, t2));
5728 break;
5730 case LSHIFT_EXPR: case RSHIFT_EXPR:
5731 /* If the second operand is constant, this is a multiplication
5732 or floor division by a power of two, so we can treat it that
5733 way unless the multiplier or divisor overflows. Signed
5734 left-shift overflow is implementation-defined rather than
5735 undefined in C90, so do not convert signed left shift into
5736 multiplication. */
5737 if (TREE_CODE (op1) == INTEGER_CST
5738 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5739 /* const_binop may not detect overflow correctly,
5740 so check for it explicitly here. */
5741 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5742 && TREE_INT_CST_HIGH (op1) == 0
5743 && 0 != (t1 = fold_convert (ctype,
5744 const_binop (LSHIFT_EXPR,
5745 size_one_node,
5746 op1)))
5747 && !TREE_OVERFLOW (t1))
5748 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5749 ? MULT_EXPR : FLOOR_DIV_EXPR,
5750 ctype,
5751 fold_convert (ctype, op0),
5752 t1),
5753 c, code, wide_type, strict_overflow_p);
5754 break;
5756 case PLUS_EXPR: case MINUS_EXPR:
5757 /* See if we can eliminate the operation on both sides. If we can, we
5758 can return a new PLUS or MINUS. If we can't, the only remaining
5759 cases where we can do anything are if the second operand is a
5760 constant. */
5761 sub_strict_overflow_p = false;
5762 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5763 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5764 if (t1 != 0 && t2 != 0
5765 && (code == MULT_EXPR
5766 /* If not multiplication, we can only do this if both operands
5767 are divisible by c. */
5768 || (multiple_of_p (ctype, op0, c)
5769 && multiple_of_p (ctype, op1, c))))
5771 if (sub_strict_overflow_p)
5772 *strict_overflow_p = true;
5773 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5774 fold_convert (ctype, t2));
5777 /* If this was a subtraction, negate OP1 and set it to be an addition.
5778 This simplifies the logic below. */
5779 if (tcode == MINUS_EXPR)
5781 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5782 /* If OP1 was not easily negatable, the constant may be OP0. */
5783 if (TREE_CODE (op0) == INTEGER_CST)
5785 tree tem = op0;
5786 op0 = op1;
5787 op1 = tem;
5788 tem = t1;
5789 t1 = t2;
5790 t2 = tem;
5794 if (TREE_CODE (op1) != INTEGER_CST)
5795 break;
5797 /* If either OP1 or C are negative, this optimization is not safe for
5798 some of the division and remainder types while for others we need
5799 to change the code. */
5800 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5802 if (code == CEIL_DIV_EXPR)
5803 code = FLOOR_DIV_EXPR;
5804 else if (code == FLOOR_DIV_EXPR)
5805 code = CEIL_DIV_EXPR;
5806 else if (code != MULT_EXPR
5807 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5808 break;
5811 /* If it's a multiply or a division/modulus operation of a multiple
5812 of our constant, do the operation and verify it doesn't overflow. */
5813 if (code == MULT_EXPR
5814 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5816 op1 = const_binop (code, fold_convert (ctype, op1),
5817 fold_convert (ctype, c));
5818 /* We allow the constant to overflow with wrapping semantics. */
5819 if (op1 == 0
5820 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5821 break;
5823 else
5824 break;
5826 /* If we have an unsigned type, we cannot widen the operation since it
5827 will change the result if the original computation overflowed. */
5828 if (TYPE_UNSIGNED (ctype) && ctype != type)
5829 break;
5831 /* If we were able to eliminate our operation from the first side,
5832 apply our operation to the second side and reform the PLUS. */
5833 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5834 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5836 /* The last case is if we are a multiply. In that case, we can
5837 apply the distributive law to commute the multiply and addition
5838 if the multiplication of the constants doesn't overflow. */
5839 if (code == MULT_EXPR)
5840 return fold_build2 (tcode, ctype,
5841 fold_build2 (code, ctype,
5842 fold_convert (ctype, op0),
5843 fold_convert (ctype, c)),
5844 op1);
5846 break;
5848 case MULT_EXPR:
5849 /* We have a special case here if we are doing something like
5850 (C * 8) % 4 since we know that's zero. */
5851 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5852 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5853 /* If the multiplication can overflow we cannot optimize this. */
5854 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5855 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5856 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5858 *strict_overflow_p = true;
5859 return omit_one_operand (type, integer_zero_node, op0);
5862 /* ... fall through ... */
5864 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5865 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5866 /* If we can extract our operation from the LHS, do so and return a
5867 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5868 do something only if the second operand is a constant. */
5869 if (same_p
5870 && (t1 = extract_muldiv (op0, c, code, wide_type,
5871 strict_overflow_p)) != 0)
5872 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5873 fold_convert (ctype, op1));
5874 else if (tcode == MULT_EXPR && code == MULT_EXPR
5875 && (t1 = extract_muldiv (op1, c, code, wide_type,
5876 strict_overflow_p)) != 0)
5877 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5878 fold_convert (ctype, t1));
5879 else if (TREE_CODE (op1) != INTEGER_CST)
5880 return 0;
5882 /* If these are the same operation types, we can associate them
5883 assuming no overflow. */
5884 if (tcode == code)
5886 double_int mul;
5887 bool overflow_p;
5888 unsigned prec = TYPE_PRECISION (ctype);
5889 bool uns = TYPE_UNSIGNED (ctype);
5890 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5891 double_int dic = tree_to_double_int (c).ext (prec, uns);
5892 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5893 overflow_p = ((!uns && overflow_p)
5894 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5895 if (!double_int_fits_to_tree_p (ctype, mul)
5896 && ((uns && tcode != MULT_EXPR) || !uns))
5897 overflow_p = 1;
5898 if (!overflow_p)
5899 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5900 double_int_to_tree (ctype, mul));
5903 /* If these operations "cancel" each other, we have the main
5904 optimizations of this pass, which occur when either constant is a
5905 multiple of the other, in which case we replace this with either an
5906 operation of CODE or TCODE.
5908 If we have an unsigned type, we cannot do this since it will change
5909 the result if the original computation overflowed. */
5910 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5911 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5912 || (tcode == MULT_EXPR
5913 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5914 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5915 && code != MULT_EXPR)))
5917 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5919 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5920 *strict_overflow_p = true;
5921 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5922 fold_convert (ctype,
5923 const_binop (TRUNC_DIV_EXPR,
5924 op1, c)));
5926 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5928 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5929 *strict_overflow_p = true;
5930 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5931 fold_convert (ctype,
5932 const_binop (TRUNC_DIV_EXPR,
5933 c, op1)));
5936 break;
5938 default:
5939 break;
5942 return 0;
5945 /* Return a node which has the indicated constant VALUE (either 0 or
5946 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5947 and is of the indicated TYPE. */
5949 tree
5950 constant_boolean_node (bool value, tree type)
5952 if (type == integer_type_node)
5953 return value ? integer_one_node : integer_zero_node;
5954 else if (type == boolean_type_node)
5955 return value ? boolean_true_node : boolean_false_node;
5956 else if (TREE_CODE (type) == VECTOR_TYPE)
5957 return build_vector_from_val (type,
5958 build_int_cst (TREE_TYPE (type),
5959 value ? -1 : 0));
5960 else
5961 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5965 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5966 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5967 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5968 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5969 COND is the first argument to CODE; otherwise (as in the example
5970 given here), it is the second argument. TYPE is the type of the
5971 original expression. Return NULL_TREE if no simplification is
5972 possible. */
5974 static tree
5975 fold_binary_op_with_conditional_arg (location_t loc,
5976 enum tree_code code,
5977 tree type, tree op0, tree op1,
5978 tree cond, tree arg, int cond_first_p)
5980 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5981 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5982 tree test, true_value, false_value;
5983 tree lhs = NULL_TREE;
5984 tree rhs = NULL_TREE;
5985 enum tree_code cond_code = COND_EXPR;
5987 if (TREE_CODE (cond) == COND_EXPR
5988 || TREE_CODE (cond) == VEC_COND_EXPR)
5990 test = TREE_OPERAND (cond, 0);
5991 true_value = TREE_OPERAND (cond, 1);
5992 false_value = TREE_OPERAND (cond, 2);
5993 /* If this operand throws an exception, then it does not make
5994 sense to try to perform a logical or arithmetic operation
5995 involving it. */
5996 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5997 lhs = true_value;
5998 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5999 rhs = false_value;
6001 else
6003 tree testtype = TREE_TYPE (cond);
6004 test = cond;
6005 true_value = constant_boolean_node (true, testtype);
6006 false_value = constant_boolean_node (false, testtype);
6009 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6010 cond_code = VEC_COND_EXPR;
6012 /* This transformation is only worthwhile if we don't have to wrap ARG
6013 in a SAVE_EXPR and the operation can be simplified without recursing
6014 on at least one of the branches once it's pushed inside the COND_EXPR. */
6015 if (!TREE_CONSTANT (arg)
6016 && (TREE_SIDE_EFFECTS (arg)
6017 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6018 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6019 return NULL_TREE;
6021 arg = fold_convert_loc (loc, arg_type, arg);
6022 if (lhs == 0)
6024 true_value = fold_convert_loc (loc, cond_type, true_value);
6025 if (cond_first_p)
6026 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6027 else
6028 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6030 if (rhs == 0)
6032 false_value = fold_convert_loc (loc, cond_type, false_value);
6033 if (cond_first_p)
6034 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6035 else
6036 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6039 /* Check that we have simplified at least one of the branches. */
6040 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6041 return NULL_TREE;
6043 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
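/* For example, with CODE == PLUS_EXPR and a constant A, this rewrites
   `2 + (b ? x : y)' as `b ? 2 + x : 2 + y'; a non-constant A is pushed
   inside only when the guards above show that doing so simplifies at
   least one branch.  */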
6047 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6049 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6050 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6051 ADDEND is the same as X.
6053 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6054 and finite. The problematic cases are when X is zero, and its mode
6055 has signed zeros. In the case of rounding towards -infinity,
6056 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6057 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6059 bool
6060 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6062 if (!real_zerop (addend))
6063 return false;
6065 /* Don't allow the fold with -fsignaling-nans. */
6066 if (HONOR_SNANS (TYPE_MODE (type)))
6067 return false;
6069 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6070 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6071 return true;
6073 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6074 if (TREE_CODE (addend) == REAL_CST
6075 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6076 negate = !negate;
6078 /* The mode has signed zeros, and we have to honor their sign.
6079 In this situation, there is only one case we can return true for.
6080 X - 0 is the same as X unless rounding towards -infinity is
6081 supported. */
6082 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
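/* Consequently, `x + 0.0' may be rewritten as `x' only when signed
   zeros can be ignored, whereas `x - 0.0' folds to `x' even with signed
   zeros, provided rounding towards -infinity need not be honored and
   signaling NaNs are disabled.  */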
6085 /* Subroutine of fold() that checks comparisons of built-in math
6086 functions against real constants.
6088 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6089 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6090 is the type of the result and ARG0 and ARG1 are the operands of the
6091 comparison. ARG1 must be a TREE_REAL_CST.
6093 The function returns the constant folded tree if a simplification
6094 can be made, and NULL_TREE otherwise. */
6096 static tree
6097 fold_mathfn_compare (location_t loc,
6098 enum built_in_function fcode, enum tree_code code,
6099 tree type, tree arg0, tree arg1)
6101 REAL_VALUE_TYPE c;
6103 if (BUILTIN_SQRT_P (fcode))
6105 tree arg = CALL_EXPR_ARG (arg0, 0);
6106 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6108 c = TREE_REAL_CST (arg1);
6109 if (REAL_VALUE_NEGATIVE (c))
6111 /* sqrt(x) < y is always false, if y is negative. */
6112 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6113 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6115 /* sqrt(x) > y is always true, if y is negative and we
6116 don't care about NaNs, i.e. negative values of x. */
6117 if (code == NE_EXPR || !HONOR_NANS (mode))
6118 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6120 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6121 return fold_build2_loc (loc, GE_EXPR, type, arg,
6122 build_real (TREE_TYPE (arg), dconst0));
6124 else if (code == GT_EXPR || code == GE_EXPR)
6126 REAL_VALUE_TYPE c2;
6128 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6129 real_convert (&c2, mode, &c2);
6131 if (REAL_VALUE_ISINF (c2))
6133 /* sqrt(x) > y is x == +Inf, when y is very large. */
6134 if (HONOR_INFINITIES (mode))
6135 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6136 build_real (TREE_TYPE (arg), c2));
6138 /* sqrt(x) > y is always false, when y is very large
6139 and we don't care about infinities. */
6140 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6143 /* sqrt(x) > c is the same as x > c*c. */
6144 return fold_build2_loc (loc, code, type, arg,
6145 build_real (TREE_TYPE (arg), c2));
6147 else if (code == LT_EXPR || code == LE_EXPR)
6149 REAL_VALUE_TYPE c2;
6151 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6152 real_convert (&c2, mode, &c2);
6154 if (REAL_VALUE_ISINF (c2))
6156 /* sqrt(x) < y is always true, when y is a very large
6157 value and we don't care about NaNs or Infinities. */
6158 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6159 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6161 /* sqrt(x) < y is x != +Inf when y is very large and we
6162 don't care about NaNs. */
6163 if (! HONOR_NANS (mode))
6164 return fold_build2_loc (loc, NE_EXPR, type, arg,
6165 build_real (TREE_TYPE (arg), c2));
6167 /* sqrt(x) < y is x >= 0 when y is very large and we
6168 don't care about Infinities. */
6169 if (! HONOR_INFINITIES (mode))
6170 return fold_build2_loc (loc, GE_EXPR, type, arg,
6171 build_real (TREE_TYPE (arg), dconst0));
6173 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6174 arg = save_expr (arg);
6175 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6176 fold_build2_loc (loc, GE_EXPR, type, arg,
6177 build_real (TREE_TYPE (arg),
6178 dconst0)),
6179 fold_build2_loc (loc, NE_EXPR, type, arg,
6180 build_real (TREE_TYPE (arg),
6181 c2)));
6184 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6185 if (! HONOR_NANS (mode))
6186 return fold_build2_loc (loc, code, type, arg,
6187 build_real (TREE_TYPE (arg), c2));
6189 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6190 arg = save_expr (arg);
6191 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6192 fold_build2_loc (loc, GE_EXPR, type, arg,
6193 build_real (TREE_TYPE (arg),
6194 dconst0)),
6195 fold_build2_loc (loc, code, type, arg,
6196 build_real (TREE_TYPE (arg),
6197 c2)));
6201 return NULL_TREE;
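/* For example, `sqrt (x) > 3.0' folds to `x > 9.0' (sound even for
   NaNs, which fail both tests), while with NaNs honored
   `sqrt (x) < 3.0' becomes `x >= 0.0 && x < 9.0'.  */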
6204 /* Subroutine of fold() that optimizes comparisons against Infinities,
6205 either +Inf or -Inf.
6207 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6208 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6209 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6211 The function returns the constant folded tree if a simplification
6212 can be made, and NULL_TREE otherwise. */
6214 static tree
6215 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6216 tree arg0, tree arg1)
6218 enum machine_mode mode;
6219 REAL_VALUE_TYPE max;
6220 tree temp;
6221 bool neg;
6223 mode = TYPE_MODE (TREE_TYPE (arg0));
6225 /* For negative infinity swap the sense of the comparison. */
6226 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6227 if (neg)
6228 code = swap_tree_comparison (code);
6230 switch (code)
6232 case GT_EXPR:
6233 /* x > +Inf is always false, if we ignore sNaNs. */
6234 if (HONOR_SNANS (mode))
6235 return NULL_TREE;
6236 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6238 case LE_EXPR:
6239 /* x <= +Inf is always true, if we don't care about NaNs. */
6240 if (! HONOR_NANS (mode))
6241 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6243 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6244 arg0 = save_expr (arg0);
6245 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6247 case EQ_EXPR:
6248 case GE_EXPR:
6249 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6250 real_maxval (&max, neg, mode);
6251 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6252 arg0, build_real (TREE_TYPE (arg0), max));
6254 case LT_EXPR:
6255 /* x < +Inf is always equal to x <= DBL_MAX. */
6256 real_maxval (&max, neg, mode);
6257 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6258 arg0, build_real (TREE_TYPE (arg0), max));
6260 case NE_EXPR:
6261 /* x != +Inf is always equal to !(x > DBL_MAX). */
6262 real_maxval (&max, neg, mode);
6263 if (! HONOR_NANS (mode))
6264 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6265 arg0, build_real (TREE_TYPE (arg0), max));
6267 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6268 arg0, build_real (TREE_TYPE (arg0), max));
6269 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6271 default:
6272 break;
6275 return NULL_TREE;
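/* For example, in double precision `x < __builtin_inf ()' becomes
   `x <= DBL_MAX', and `x == __builtin_inf ()' becomes `x > DBL_MAX'.  */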
6278 /* Subroutine of fold() that optimizes comparisons of a division by
6279 a nonzero integer constant against an integer constant, i.e.
6280 X/C1 op C2.
6282 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6283 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6284 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6286 The function returns the constant folded tree if a simplification
6287 can be made, and NULL_TREE otherwise. */
6289 static tree
6290 fold_div_compare (location_t loc,
6291 enum tree_code code, tree type, tree arg0, tree arg1)
6293 tree prod, tmp, hi, lo;
6294 tree arg00 = TREE_OPERAND (arg0, 0);
6295 tree arg01 = TREE_OPERAND (arg0, 1);
6296 double_int val;
6297 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6298 bool neg_overflow;
6299 bool overflow;
6301 /* We have to do this the hard way to detect unsigned overflow.
6302 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6303 val = TREE_INT_CST (arg01)
6304 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6305 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6306 neg_overflow = false;
6308 if (unsigned_p)
6310 tmp = int_const_binop (MINUS_EXPR, arg01,
6311 build_int_cst (TREE_TYPE (arg01), 1));
6312 lo = prod;
6314 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6315 val = TREE_INT_CST (prod)
6316 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6317 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6318 -1, overflow | TREE_OVERFLOW (prod));
6320 else if (tree_int_cst_sgn (arg01) >= 0)
6322 tmp = int_const_binop (MINUS_EXPR, arg01,
6323 build_int_cst (TREE_TYPE (arg01), 1));
6324 switch (tree_int_cst_sgn (arg1))
6326 case -1:
6327 neg_overflow = true;
6328 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6329 hi = prod;
6330 break;
6332 case 0:
6333 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6334 hi = tmp;
6335 break;
6337 case 1:
6338 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6339 lo = prod;
6340 break;
6342 default:
6343 gcc_unreachable ();
6346 else
6348 /* A negative divisor reverses the relational operators. */
6349 code = swap_tree_comparison (code);
6351 tmp = int_const_binop (PLUS_EXPR, arg01,
6352 build_int_cst (TREE_TYPE (arg01), 1));
6353 switch (tree_int_cst_sgn (arg1))
6355 case -1:
6356 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6357 lo = prod;
6358 break;
6360 case 0:
6361 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6362 lo = tmp;
6363 break;
6365 case 1:
6366 neg_overflow = true;
6367 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6368 hi = prod;
6369 break;
6371 default:
6372 gcc_unreachable ();
6376 switch (code)
6378 case EQ_EXPR:
6379 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6380 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6381 if (TREE_OVERFLOW (hi))
6382 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6383 if (TREE_OVERFLOW (lo))
6384 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6385 return build_range_check (loc, type, arg00, 1, lo, hi);
6387 case NE_EXPR:
6388 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6389 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6390 if (TREE_OVERFLOW (hi))
6391 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6392 if (TREE_OVERFLOW (lo))
6393 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6394 return build_range_check (loc, type, arg00, 0, lo, hi);
6396 case LT_EXPR:
6397 if (TREE_OVERFLOW (lo))
6399 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6400 return omit_one_operand_loc (loc, type, tmp, arg00);
6402 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6404 case LE_EXPR:
6405 if (TREE_OVERFLOW (hi))
6407 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6408 return omit_one_operand_loc (loc, type, tmp, arg00);
6410 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6412 case GT_EXPR:
6413 if (TREE_OVERFLOW (hi))
6415 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6416 return omit_one_operand_loc (loc, type, tmp, arg00);
6418 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6420 case GE_EXPR:
6421 if (TREE_OVERFLOW (lo))
6423 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6424 return omit_one_operand_loc (loc, type, tmp, arg00);
6426 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6428 default:
6429 break;
6432 return NULL_TREE;
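/* A worked instance: for signed `x / 3 == 2' we get prod = 6, tmp = 2,
   lo = 6 and hi = 8, so the EQ_EXPR case returns the range check
   `6 <= x && x <= 8' from build_range_check.  */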
6436 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6437 equality/inequality test, then return a simplified form of the test
6438 using a sign test. Otherwise return NULL. TYPE is the desired
6439 result type. */
6441 static tree
6442 fold_single_bit_test_into_sign_test (location_t loc,
6443 enum tree_code code, tree arg0, tree arg1,
6444 tree result_type)
6446 /* If this is testing a single bit, we can optimize the test. */
6447 if ((code == NE_EXPR || code == EQ_EXPR)
6448 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6449 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6451 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6452 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6453 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6455 if (arg00 != NULL_TREE
6456 /* This is only a win if casting to a signed type is cheap,
6457 i.e. when arg00's type is not a partial mode. */
6458 && TYPE_PRECISION (TREE_TYPE (arg00))
6459 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6461 tree stype = signed_type_for (TREE_TYPE (arg00));
6462 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6463 result_type,
6464 fold_convert_loc (loc, stype, arg00),
6465 build_int_cst (stype, 0));
6469 return NULL_TREE;
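/* For example, with a 32-bit unsigned A, `(A & 0x80000000) != 0'
   becomes `(int) A < 0', and `(A & 0x80000000) == 0' becomes
   `(int) A >= 0'.  */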
6472 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6473 equality/inequality test, then return a simplified form of
6474 the test using shifts and logical operations. Otherwise return
6475 NULL. TYPE is the desired result type. */
6477 tree
6478 fold_single_bit_test (location_t loc, enum tree_code code,
6479 tree arg0, tree arg1, tree result_type)
6481 /* If this is testing a single bit, we can optimize the test. */
6482 if ((code == NE_EXPR || code == EQ_EXPR)
6483 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6484 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6486 tree inner = TREE_OPERAND (arg0, 0);
6487 tree type = TREE_TYPE (arg0);
6488 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6489 enum machine_mode operand_mode = TYPE_MODE (type);
6490 int ops_unsigned;
6491 tree signed_type, unsigned_type, intermediate_type;
6492 tree tem, one;
6494 /* First, see if we can fold the single bit test into a sign-bit
6495 test. */
6496 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6497 result_type);
6498 if (tem)
6499 return tem;
6501 /* Otherwise we have (A & C) != 0 where C is a single bit,
6502 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6503 Similarly for (A & C) == 0. */
6505 /* If INNER is a right shift of a constant and it plus BITNUM does
6506 not overflow, adjust BITNUM and INNER. */
6507 if (TREE_CODE (inner) == RSHIFT_EXPR
6508 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6509 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6510 && bitnum < TYPE_PRECISION (type)
6511 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6512 bitnum - TYPE_PRECISION (type)))
6514 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6515 inner = TREE_OPERAND (inner, 0);
6518 /* If we are going to be able to omit the AND below, we must do our
6519 operations as unsigned. If we must use the AND, we have a choice.
6520 Normally unsigned is faster, but for some machines signed is. */
6521 #ifdef LOAD_EXTEND_OP
6522 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6523 && !flag_syntax_only) ? 0 : 1;
6524 #else
6525 ops_unsigned = 1;
6526 #endif
6528 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6529 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6530 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6531 inner = fold_convert_loc (loc, intermediate_type, inner);
6533 if (bitnum != 0)
6534 inner = build2 (RSHIFT_EXPR, intermediate_type,
6535 inner, size_int (bitnum));
6537 one = build_int_cst (intermediate_type, 1);
6539 if (code == EQ_EXPR)
6540 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6542 /* Put the AND last so it can combine with more things. */
6543 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6545 /* Make sure to return the proper type. */
6546 inner = fold_convert_loc (loc, result_type, inner);
6548 return inner;
6550 return NULL_TREE;
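/* For example, `(A & 8) != 0' becomes `((unsigned) A >> 3) & 1', and
   `(A & 8) == 0' gains an XOR: `(((unsigned) A >> 3) ^ 1) & 1'
   (assuming the unsigned variant is chosen as the intermediate type).  */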
6553 /* Check whether we are allowed to reorder operands arg0 and arg1,
6554 such that the evaluation of arg1 occurs before arg0. */
6556 static bool
6557 reorder_operands_p (const_tree arg0, const_tree arg1)
6559 if (! flag_evaluation_order)
6560 return true;
6561 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6562 return true;
6563 return ! TREE_SIDE_EFFECTS (arg0)
6564 && ! TREE_SIDE_EFFECTS (arg1);
6567 /* Test whether it is preferable to swap two operands, ARG0 and
6568 ARG1, for example because ARG0 is an integer constant and ARG1
6569 isn't. If REORDER is true, only recommend swapping if we can
6570 evaluate the operands in reverse order. */
6572 bool
6573 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6575 STRIP_SIGN_NOPS (arg0);
6576 STRIP_SIGN_NOPS (arg1);
6578 if (TREE_CODE (arg1) == INTEGER_CST)
6579 return 0;
6580 if (TREE_CODE (arg0) == INTEGER_CST)
6581 return 1;
6583 if (TREE_CODE (arg1) == REAL_CST)
6584 return 0;
6585 if (TREE_CODE (arg0) == REAL_CST)
6586 return 1;
6588 if (TREE_CODE (arg1) == FIXED_CST)
6589 return 0;
6590 if (TREE_CODE (arg0) == FIXED_CST)
6591 return 1;
6593 if (TREE_CODE (arg1) == COMPLEX_CST)
6594 return 0;
6595 if (TREE_CODE (arg0) == COMPLEX_CST)
6596 return 1;
6598 if (TREE_CONSTANT (arg1))
6599 return 0;
6600 if (TREE_CONSTANT (arg0))
6601 return 1;
6603 if (optimize_function_for_size_p (cfun))
6604 return 0;
6606 if (reorder && flag_evaluation_order
6607 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6608 return 0;
6610 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6611 for commutative and comparison operators. Ensuring a canonical
6612 form allows the optimizers to find additional redundancies without
6613 having to explicitly check for both orderings. */
6614 if (TREE_CODE (arg0) == SSA_NAME
6615 && TREE_CODE (arg1) == SSA_NAME
6616 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6617 return 1;
6619 /* Put SSA_NAMEs last. */
6620 if (TREE_CODE (arg1) == SSA_NAME)
6621 return 0;
6622 if (TREE_CODE (arg0) == SSA_NAME)
6623 return 1;
6625 /* Put variables last. */
6626 if (DECL_P (arg1))
6627 return 0;
6628 if (DECL_P (arg0))
6629 return 1;
6631 return 0;
6634 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6635 ARG0 is extended to a wider type. */
6637 static tree
6638 fold_widened_comparison (location_t loc, enum tree_code code,
6639 tree type, tree arg0, tree arg1)
6641 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6642 tree arg1_unw;
6643 tree shorter_type, outer_type;
6644 tree min, max;
6645 bool above, below;
6647 if (arg0_unw == arg0)
6648 return NULL_TREE;
6649 shorter_type = TREE_TYPE (arg0_unw);
6651 #ifdef HAVE_canonicalize_funcptr_for_compare
6652 /* Disable this optimization if we're casting a function pointer
6653 type on targets that require function pointer canonicalization. */
6654 if (HAVE_canonicalize_funcptr_for_compare
6655 && TREE_CODE (shorter_type) == POINTER_TYPE
6656 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6657 return NULL_TREE;
6658 #endif
6660 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6661 return NULL_TREE;
6663 arg1_unw = get_unwidened (arg1, NULL_TREE);
6665 /* If possible, express the comparison in the shorter mode. */
6666 if ((code == EQ_EXPR || code == NE_EXPR
6667 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6668 && (TREE_TYPE (arg1_unw) == shorter_type
6669 || ((TYPE_PRECISION (shorter_type)
6670 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6671 && (TYPE_UNSIGNED (shorter_type)
6672 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6673 || (TREE_CODE (arg1_unw) == INTEGER_CST
6674 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6675 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6676 && int_fits_type_p (arg1_unw, shorter_type))))
6677 return fold_build2_loc (loc, code, type, arg0_unw,
6678 fold_convert_loc (loc, shorter_type, arg1_unw));
6680 if (TREE_CODE (arg1_unw) != INTEGER_CST
6681 || TREE_CODE (shorter_type) != INTEGER_TYPE
6682 || !int_fits_type_p (arg1_unw, shorter_type))
6683 return NULL_TREE;
6685 /* If we are comparing with an integer that does not fit into the range
6686 of the shorter type, the result is known. */
6687 outer_type = TREE_TYPE (arg1_unw);
6688 min = lower_bound_in_type (outer_type, shorter_type);
6689 max = upper_bound_in_type (outer_type, shorter_type);
6691 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6692 max, arg1_unw));
6693 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6694 arg1_unw, min));
6696 switch (code)
6698 case EQ_EXPR:
6699 if (above || below)
6700 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6701 break;
6703 case NE_EXPR:
6704 if (above || below)
6705 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6706 break;
6708 case LT_EXPR:
6709 case LE_EXPR:
6710 if (above)
6711 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6712 else if (below)
6713 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6715 case GT_EXPR:
6716 case GE_EXPR:
6717 if (above)
6718 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6719 else if (below)
6720 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6722 default:
6723 break;
6726 return NULL_TREE;
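/* For example, with `unsigned char c', the comparison `(int) c == 300'
   is outside the range [0, 255] of the shorter type, so it folds to
   constant false and `(int) c != 300' folds to constant true.  */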
6729 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6730 ARG0 just the signedness is changed. */
6732 static tree
6733 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6734 tree arg0, tree arg1)
6736 tree arg0_inner;
6737 tree inner_type, outer_type;
6739 if (!CONVERT_EXPR_P (arg0))
6740 return NULL_TREE;
6742 outer_type = TREE_TYPE (arg0);
6743 arg0_inner = TREE_OPERAND (arg0, 0);
6744 inner_type = TREE_TYPE (arg0_inner);
6746 #ifdef HAVE_canonicalize_funcptr_for_compare
6747 /* Disable this optimization if we're casting a function pointer
6748 type on targets that require function pointer canonicalization. */
6749 if (HAVE_canonicalize_funcptr_for_compare
6750 && TREE_CODE (inner_type) == POINTER_TYPE
6751 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6752 return NULL_TREE;
6753 #endif
6755 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6756 return NULL_TREE;
6758 if (TREE_CODE (arg1) != INTEGER_CST
6759 && !(CONVERT_EXPR_P (arg1)
6760 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6761 return NULL_TREE;
6763 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6764 && code != NE_EXPR
6765 && code != EQ_EXPR)
6766 return NULL_TREE;
6768 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6769 return NULL_TREE;
6771 if (TREE_CODE (arg1) == INTEGER_CST)
6772 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6773 0, TREE_OVERFLOW (arg1));
6774 else
6775 arg1 = fold_convert_loc (loc, inner_type, arg1);
6777 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
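/* For example, with `unsigned int u', the comparison `(int) u == 3'
   becomes `u == 3U': for equality, only the signedness of the operands
   changes, which cannot affect the result.  */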
6780 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6781 step of the array. Reconstructs s and delta in the case of s *
6782 delta being an integer constant (and thus already folded). ADDR is
6783 the address. MULT is the multiplicative expression. If the
6784 function succeeds, the new address expression is returned.
6785 Otherwise NULL_TREE is returned. LOC is the location of the
6786 resulting expression. */
6788 static tree
6789 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6791 tree s, delta, step;
6792 tree ref = TREE_OPERAND (addr, 0), pref;
6793 tree ret, pos;
6794 tree itype;
6795 bool mdim = false;
6797 /* Strip the nops that might be added when converting op1 to sizetype. */
6798 STRIP_NOPS (op1);
6800 /* Canonicalize op1 into a possibly non-constant delta
6801 and an INTEGER_CST s. */
6802 if (TREE_CODE (op1) == MULT_EXPR)
6804 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6806 STRIP_NOPS (arg0);
6807 STRIP_NOPS (arg1);
6809 if (TREE_CODE (arg0) == INTEGER_CST)
6811 s = arg0;
6812 delta = arg1;
6814 else if (TREE_CODE (arg1) == INTEGER_CST)
6816 s = arg1;
6817 delta = arg0;
6819 else
6820 return NULL_TREE;
6822 else if (TREE_CODE (op1) == INTEGER_CST)
6824 delta = op1;
6825 s = NULL_TREE;
6827 else
6829 /* Act as if op1 were delta * 1. */
6830 delta = op1;
6831 s = integer_one_node;
6834 /* Handle &x.array the same as we would handle &x.array[0]. */
6835 if (TREE_CODE (ref) == COMPONENT_REF
6836 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6838 tree domain;
6840 /* Remember if this was a multi-dimensional array. */
6841 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6842 mdim = true;
6844 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6845 if (! domain)
6846 goto cont;
6847 itype = TREE_TYPE (domain);
6849 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6850 if (TREE_CODE (step) != INTEGER_CST)
6851 goto cont;
6853 if (s)
6855 if (! tree_int_cst_equal (step, s))
6856 goto cont;
6858 else
6860 /* Check whether delta is a multiple of step. */
6861 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6862 if (! tmp)
6863 goto cont;
6864 delta = tmp;
6867 /* Only fold here if we can verify we do not overflow one
6868 dimension of a multi-dimensional array. */
6869 if (mdim)
6871 tree tmp;
6873 if (!TYPE_MIN_VALUE (domain)
6874 || !TYPE_MAX_VALUE (domain)
6875 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6876 goto cont;
6878 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6879 fold_convert_loc (loc, itype,
6880 TYPE_MIN_VALUE (domain)),
6881 fold_convert_loc (loc, itype, delta));
6882 if (TREE_CODE (tmp) != INTEGER_CST
6883 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6884 goto cont;
6887 /* We found a suitable component reference. */
6889 pref = TREE_OPERAND (addr, 0);
6890 ret = copy_node (pref);
6891 SET_EXPR_LOCATION (ret, loc);
6893 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6894 fold_build2_loc
6895 (loc, PLUS_EXPR, itype,
6896 fold_convert_loc (loc, itype,
6897 TYPE_MIN_VALUE
6898 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6899 fold_convert_loc (loc, itype, delta)),
6900 NULL_TREE, NULL_TREE);
6901 return build_fold_addr_expr_loc (loc, ret);
6904 cont:
6906 for (;; ref = TREE_OPERAND (ref, 0))
6908 if (TREE_CODE (ref) == ARRAY_REF)
6910 tree domain;
6912 /* Remember if this was a multi-dimensional array. */
6913 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6914 mdim = true;
6916 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6917 if (! domain)
6918 continue;
6919 itype = TREE_TYPE (domain);
6921 step = array_ref_element_size (ref);
6922 if (TREE_CODE (step) != INTEGER_CST)
6923 continue;
6925 if (s)
6927 if (! tree_int_cst_equal (step, s))
6928 continue;
6930 else
6932 /* Check whether delta is a multiple of step. */
6933 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6934 if (! tmp)
6935 continue;
6936 delta = tmp;
6939 /* Only fold here if we can verify we do not overflow one
6940 dimension of a multi-dimensional array. */
6941 if (mdim)
6943 tree tmp;
6945 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6946 || !TYPE_MAX_VALUE (domain)
6947 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6948 continue;
6950 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6951 fold_convert_loc (loc, itype,
6952 TREE_OPERAND (ref, 1)),
6953 fold_convert_loc (loc, itype, delta));
6954 if (!tmp
6955 || TREE_CODE (tmp) != INTEGER_CST
6956 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6957 continue;
6960 break;
6962 else
6963 mdim = false;
6965 if (!handled_component_p (ref))
6966 return NULL_TREE;
6969 /* We found a suitable array reference. So copy everything up to it,
6970 and replace the index. */
6972 pref = TREE_OPERAND (addr, 0);
6973 ret = copy_node (pref);
6974 SET_EXPR_LOCATION (ret, loc);
6975 pos = ret;
6977 while (pref != ref)
6979 pref = TREE_OPERAND (pref, 0);
6980 TREE_OPERAND (pos, 0) = copy_node (pref);
6981 pos = TREE_OPERAND (pos, 0);
6984 TREE_OPERAND (pos, 1)
6985 = fold_build2_loc (loc, PLUS_EXPR, itype,
6986 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6987 fold_convert_loc (loc, itype, delta));
6988 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
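/* For example, with `int a[10]' and 4-byte int, the address
   `&a[1] p+ 12' is recognized as stepping 12 / 4 == 3 elements and is
   rewritten as `&a[1 + 3]', i.e. `&a[4]'.  */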
6992 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6993 means A >= Y && A != MAX, but in this case we know that
6994 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6996 static tree
6997 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6999 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7001 if (TREE_CODE (bound) == LT_EXPR)
7002 a = TREE_OPERAND (bound, 0);
7003 else if (TREE_CODE (bound) == GT_EXPR)
7004 a = TREE_OPERAND (bound, 1);
7005 else
7006 return NULL_TREE;
7008 typea = TREE_TYPE (a);
7009 if (!INTEGRAL_TYPE_P (typea)
7010 && !POINTER_TYPE_P (typea))
7011 return NULL_TREE;
7013 if (TREE_CODE (ineq) == LT_EXPR)
7015 a1 = TREE_OPERAND (ineq, 1);
7016 y = TREE_OPERAND (ineq, 0);
7018 else if (TREE_CODE (ineq) == GT_EXPR)
7020 a1 = TREE_OPERAND (ineq, 0);
7021 y = TREE_OPERAND (ineq, 1);
7023 else
7024 return NULL_TREE;
7026 if (TREE_TYPE (a1) != typea)
7027 return NULL_TREE;
7029 if (POINTER_TYPE_P (typea))
7031 /* Convert the pointer types into integer before taking the difference. */
7032 tree ta = fold_convert_loc (loc, ssizetype, a);
7033 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7034 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7036 else
7037 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7039 if (!diff || !integer_onep (diff))
7040 return NULL_TREE;
7042 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7045 /* Fold a sum or difference of at least one multiplication.
7046 Returns the folded tree or NULL if no simplification could be made. */
7048 static tree
7049 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7050 tree arg0, tree arg1)
7052 tree arg00, arg01, arg10, arg11;
7053 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7055 /* (A * C) +- (B * C) -> (A+-B) * C.
7056 (A * C) +- A -> A * (C+-1).
7057 We are most concerned about the case where C is a constant,
7058 but other combinations show up during loop reduction. Since
7059 it is not difficult, try all four possibilities. */
7061 if (TREE_CODE (arg0) == MULT_EXPR)
7063 arg00 = TREE_OPERAND (arg0, 0);
7064 arg01 = TREE_OPERAND (arg0, 1);
7066 else if (TREE_CODE (arg0) == INTEGER_CST)
7068 arg00 = build_one_cst (type);
7069 arg01 = arg0;
7071 else
7073 /* We cannot generate constant 1 for fract. */
7074 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7075 return NULL_TREE;
7076 arg00 = arg0;
7077 arg01 = build_one_cst (type);
7079 if (TREE_CODE (arg1) == MULT_EXPR)
7081 arg10 = TREE_OPERAND (arg1, 0);
7082 arg11 = TREE_OPERAND (arg1, 1);
7084 else if (TREE_CODE (arg1) == INTEGER_CST)
7086 arg10 = build_one_cst (type);
7087 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7088 the purpose of this canonicalization. */
7089 if (TREE_INT_CST_HIGH (arg1) == -1
7090 && negate_expr_p (arg1)
7091 && code == PLUS_EXPR)
7093 arg11 = negate_expr (arg1);
7094 code = MINUS_EXPR;
7096 else
7097 arg11 = arg1;
7099 else
7101 /* We cannot generate constant 1 for fract. */
7102 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7103 return NULL_TREE;
7104 arg10 = arg1;
7105 arg11 = build_one_cst (type);
7107 same = NULL_TREE;
7109 if (operand_equal_p (arg01, arg11, 0))
7110 same = arg01, alt0 = arg00, alt1 = arg10;
7111 else if (operand_equal_p (arg00, arg10, 0))
7112 same = arg00, alt0 = arg01, alt1 = arg11;
7113 else if (operand_equal_p (arg00, arg11, 0))
7114 same = arg00, alt0 = arg01, alt1 = arg10;
7115 else if (operand_equal_p (arg01, arg10, 0))
7116 same = arg01, alt0 = arg00, alt1 = arg11;
7118 /* No identical multiplicands; see if we can find a common
7119 power-of-two factor in non-power-of-two multiplies. This
7120 can help in multi-dimensional array access. */
7121 else if (host_integerp (arg01, 0)
7122 && host_integerp (arg11, 0))
7124 HOST_WIDE_INT int01, int11, tmp;
7125 bool swap = false;
7126 tree maybe_same;
7127 int01 = TREE_INT_CST_LOW (arg01);
7128 int11 = TREE_INT_CST_LOW (arg11);
7130 /* Move min of absolute values to int11. */
7131 if (absu_hwi (int01) < absu_hwi (int11))
7133 tmp = int01, int01 = int11, int11 = tmp;
7134 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7135 maybe_same = arg01;
7136 swap = true;
7138 else
7139 maybe_same = arg11;
7141 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7142 /* The remainder should not be a constant, otherwise we
7143 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7144 increased the number of multiplications necessary. */
7145 && TREE_CODE (arg10) != INTEGER_CST)
7147 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7148 build_int_cst (TREE_TYPE (arg00),
7149 int01 / int11));
7150 alt1 = arg10;
7151 same = maybe_same;
7152 if (swap)
7153 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7157 if (same)
7158 return fold_build2_loc (loc, MULT_EXPR, type,
7159 fold_build2_loc (loc, code, type,
7160 fold_convert_loc (loc, type, alt0),
7161 fold_convert_loc (loc, type, alt1)),
7162 fold_convert_loc (loc, type, same));
7164 return NULL_TREE;
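/* For example, `a * 3 + b * 3' becomes `(a + b) * 3', and the
   power-of-two rule above turns `i * 12 + j * 4' into
   `(i * 3 + j) * 4', exposing a common factor that can become a
   shift.  */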
7167 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7168 specified by EXPR into the buffer PTR of length LEN bytes.
7169 Return the number of bytes placed in the buffer, or zero
7170 upon failure. */
7172 static int
7173 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7175 tree type = TREE_TYPE (expr);
7176 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7177 int byte, offset, word, words;
7178 unsigned char value;
7180 if (total_bytes > len)
7181 return 0;
7182 words = total_bytes / UNITS_PER_WORD;
7184 for (byte = 0; byte < total_bytes; byte++)
7186 int bitpos = byte * BITS_PER_UNIT;
7187 if (bitpos < HOST_BITS_PER_WIDE_INT)
7188 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7189 else
7190 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7191 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7193 if (total_bytes > UNITS_PER_WORD)
7195 word = byte / UNITS_PER_WORD;
7196 if (WORDS_BIG_ENDIAN)
7197 word = (words - 1) - word;
7198 offset = word * UNITS_PER_WORD;
7199 if (BYTES_BIG_ENDIAN)
7200 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7201 else
7202 offset += byte % UNITS_PER_WORD;
7204 else
7205 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7206 ptr[offset] = value;
7208 return total_bytes;
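/* For example, encoding the 32-bit INTEGER_CST 0x01020304 stores the
   bytes 04 03 02 01 into PTR on a little-endian target (01 02 03 04 on
   big-endian) and returns 4.  */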
7212 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7213 specified by EXPR into the buffer PTR of length LEN bytes.
7214 Return the number of bytes placed in the buffer, or zero
7215 upon failure. */
7217 static int
7218 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7220 tree type = TREE_TYPE (expr);
7221 enum machine_mode mode = TYPE_MODE (type);
7222 int total_bytes = GET_MODE_SIZE (mode);
7223 FIXED_VALUE_TYPE value;
7224 tree i_value, i_type;
7226 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7227 return 0;
7229 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7231 if (NULL_TREE == i_type
7232 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7233 return 0;
7235 value = TREE_FIXED_CST (expr);
7236 i_value = double_int_to_tree (i_type, value.data);
7238 return native_encode_int (i_value, ptr, len);
7242 /* Subroutine of native_encode_expr. Encode the REAL_CST
7243 specified by EXPR into the buffer PTR of length LEN bytes.
7244 Return the number of bytes placed in the buffer, or zero
7245 upon failure. */
7247 static int
7248 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7250 tree type = TREE_TYPE (expr);
7251 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7252 int byte, offset, word, words, bitpos;
7253 unsigned char value;
7255 /* There are always 32 bits in each long, no matter the size of
7256 the host's long. We handle floating point representations with
7257 up to 192 bits. */
7258 long tmp[6];
7260 if (total_bytes > len)
7261 return 0;
7262 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7264 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7266 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7267 bitpos += BITS_PER_UNIT)
7269 byte = (bitpos / BITS_PER_UNIT) & 3;
7270 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7272 if (UNITS_PER_WORD < 4)
7274 word = byte / UNITS_PER_WORD;
7275 if (WORDS_BIG_ENDIAN)
7276 word = (words - 1) - word;
7277 offset = word * UNITS_PER_WORD;
7278 if (BYTES_BIG_ENDIAN)
7279 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7280 else
7281 offset += byte % UNITS_PER_WORD;
7283 else
7284 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7285 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7287 return total_bytes;
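/* Editor's note -- an illustrative sketch, not part of the original
   source; it assumes float_type_node uses IEEE single precision.  */
#if 0
static void
example_native_encode_real (void)
{
  unsigned char buf[4];
  tree one = build_real (float_type_node, dconst1);
  int n = native_encode_real (one, buf, sizeof (buf));
  /* n == 4; the bytes spell the IEEE-754 pattern 0x3f800000,
     i.e. 00 00 80 3f on a little-endian target.  */
}
#endif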
7290 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7291 specified by EXPR into the buffer PTR of length LEN bytes.
7292 Return the number of bytes placed in the buffer, or zero
7293 upon failure. */
7295 static int
7296 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7298 int rsize, isize;
7299 tree part;
7301 part = TREE_REALPART (expr);
7302 rsize = native_encode_expr (part, ptr, len);
7303 if (rsize == 0)
7304 return 0;
7305 part = TREE_IMAGPART (expr);
7306 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7307 if (isize != rsize)
7308 return 0;
7309 return rsize + isize;
7313 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7314 specified by EXPR into the buffer PTR of length LEN bytes.
7315 Return the number of bytes placed in the buffer, or zero
7316 upon failure. */
7318 static int
7319 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7321 unsigned i, count;
7322 int size, offset;
7323 tree itype, elem;
7325 offset = 0;
7326 count = VECTOR_CST_NELTS (expr);
7327 itype = TREE_TYPE (TREE_TYPE (expr));
7328 size = GET_MODE_SIZE (TYPE_MODE (itype));
7329 for (i = 0; i < count; i++)
7331 elem = VECTOR_CST_ELT (expr, i);
7332 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7333 return 0;
7334 offset += size;
7336 return offset;
7340 /* Subroutine of native_encode_expr. Encode the STRING_CST
7341 specified by EXPR into the buffer PTR of length LEN bytes.
7342 Return the number of bytes placed in the buffer, or zero
7343 upon failure. */
7345 static int
7346 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7348 tree type = TREE_TYPE (expr);
7349 HOST_WIDE_INT total_bytes;
7351 if (TREE_CODE (type) != ARRAY_TYPE
7352 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7353 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7354 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7355 return 0;
7356 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7357 if (total_bytes > len)
7358 return 0;
7359 if (TREE_STRING_LENGTH (expr) < total_bytes)
7361 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7362 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7363 total_bytes - TREE_STRING_LENGTH (expr));
7365 else
7366 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7367 return total_bytes;
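/* Editor's note -- an illustrative sketch, not part of the original
   source.  The STR8 parameter is hypothetical: a STRING_CST of type
   char[8] holding "ab".  Trailing bytes are zero-filled, mirroring C
   initializer semantics.  */
#if 0
static void
example_native_encode_string (const_tree str8)
{
  unsigned char buf[8];
  int n = native_encode_string (str8, buf, sizeof (buf));
  /* n == 8; buf holds 'a' 'b' followed by six zero bytes.  */
}
#endif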
7371 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7372 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7373 buffer PTR of length LEN bytes. Return the number of bytes
7374 placed in the buffer, or zero upon failure. */
7376 int
7377 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7379 switch (TREE_CODE (expr))
7381 case INTEGER_CST:
7382 return native_encode_int (expr, ptr, len);
7384 case REAL_CST:
7385 return native_encode_real (expr, ptr, len);
7387 case FIXED_CST:
7388 return native_encode_fixed (expr, ptr, len);
7390 case COMPLEX_CST:
7391 return native_encode_complex (expr, ptr, len);
7393 case VECTOR_CST:
7394 return native_encode_vector (expr, ptr, len);
7396 case STRING_CST:
7397 return native_encode_string (expr, ptr, len);
7399 default:
7400 return 0;
7405 /* Subroutine of native_interpret_expr. Interpret the contents of
7406 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7407 If the buffer cannot be interpreted, return NULL_TREE. */
7409 static tree
7410 native_interpret_int (tree type, const unsigned char *ptr, int len)
7412 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7413 double_int result;
7415 if (total_bytes > len
7416 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7417 return NULL_TREE;
7419 result = double_int::from_buffer (ptr, total_bytes);
7421 return double_int_to_tree (type, result);
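/* Editor's note -- an illustrative sketch, not part of the original
   source; it assumes a 32-bit integer_type_node.  */
#if 0
static tree
example_native_interpret_int (void)
{
  static const unsigned char buf[4] = { 0x44, 0x33, 0x22, 0x11 };
  /* Yields the INTEGER_CST 0x11223344 on a little-endian target.  */
  return native_interpret_int (integer_type_node, buf, 4);
}
#endif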
7425 /* Subroutine of native_interpret_expr. Interpret the contents of
7426 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7427 If the buffer cannot be interpreted, return NULL_TREE. */
7429 static tree
7430 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7432 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7433 double_int result;
7434 FIXED_VALUE_TYPE fixed_value;
7436 if (total_bytes > len
7437 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7438 return NULL_TREE;
7440 result = double_int::from_buffer (ptr, total_bytes);
7441 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7443 return build_fixed (type, fixed_value);
7447 /* Subroutine of native_interpret_expr. Interpret the contents of
7448 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7449 If the buffer cannot be interpreted, return NULL_TREE. */
7451 static tree
7452 native_interpret_real (tree type, const unsigned char *ptr, int len)
7454 enum machine_mode mode = TYPE_MODE (type);
7455 int total_bytes = GET_MODE_SIZE (mode);
7456 int byte, offset, word, words, bitpos;
7457 unsigned char value;
7458 /* There are always 32 bits in each long, no matter the size of
7459 the host's long. We handle floating point representations with
7460 up to 192 bits. */
7461 REAL_VALUE_TYPE r;
7462 long tmp[6];
7464 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7465 if (total_bytes > len || total_bytes > 24)
7466 return NULL_TREE;
7467 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7469 memset (tmp, 0, sizeof (tmp));
7470 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7471 bitpos += BITS_PER_UNIT)
7473 byte = (bitpos / BITS_PER_UNIT) & 3;
7474 if (UNITS_PER_WORD < 4)
7476 word = byte / UNITS_PER_WORD;
7477 if (WORDS_BIG_ENDIAN)
7478 word = (words - 1) - word;
7479 offset = word * UNITS_PER_WORD;
7480 if (BYTES_BIG_ENDIAN)
7481 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7482 else
7483 offset += byte % UNITS_PER_WORD;
7485 else
7486 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7487 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7489 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7492 real_from_target (&r, tmp, mode);
7493 return build_real (type, r);
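/* Editor's note -- an illustrative sketch, not part of the original
   source: native_interpret_real undoes native_encode_real, so an
   encode/interpret round trip reproduces the original REAL_CST.  */
#if 0
static bool
example_real_round_trip (tree real_cst)
{
  unsigned char buf[24];
  int n = native_encode_real (real_cst, buf, sizeof (buf));
  if (n == 0)
    return false;
  tree back = native_interpret_real (TREE_TYPE (real_cst), buf, n);
  return back && operand_equal_p (real_cst, back, 0);
}
#endif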
7497 /* Subroutine of native_interpret_expr. Interpret the contents of
7498 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7499 If the buffer cannot be interpreted, return NULL_TREE. */
7501 static tree
7502 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7504 tree etype, rpart, ipart;
7505 int size;
7507 etype = TREE_TYPE (type);
7508 size = GET_MODE_SIZE (TYPE_MODE (etype));
7509 if (size * 2 > len)
7510 return NULL_TREE;
7511 rpart = native_interpret_expr (etype, ptr, size);
7512 if (!rpart)
7513 return NULL_TREE;
7514 ipart = native_interpret_expr (etype, ptr+size, size);
7515 if (!ipart)
7516 return NULL_TREE;
7517 return build_complex (type, rpart, ipart);
7521 /* Subroutine of native_interpret_expr. Interpret the contents of
7522 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7523 If the buffer cannot be interpreted, return NULL_TREE. */
7525 static tree
7526 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7528 tree etype, elem;
7529 int i, size, count;
7530 tree *elements;
7532 etype = TREE_TYPE (type);
7533 size = GET_MODE_SIZE (TYPE_MODE (etype));
7534 count = TYPE_VECTOR_SUBPARTS (type);
7535 if (size * count > len)
7536 return NULL_TREE;
7538 elements = XALLOCAVEC (tree, count);
7539 for (i = count - 1; i >= 0; i--)
7541 elem = native_interpret_expr (etype, ptr+(i*size), size);
7542 if (!elem)
7543 return NULL_TREE;
7544 elements[i] = elem;
7546 return build_vector (type, elements);
7550 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7551 the buffer PTR of length LEN as a constant of type TYPE. For
7552 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7553 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7554 return NULL_TREE. */
7556 tree
7557 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7559 switch (TREE_CODE (type))
7561 case INTEGER_TYPE:
7562 case ENUMERAL_TYPE:
7563 case BOOLEAN_TYPE:
7564 case POINTER_TYPE:
7565 case REFERENCE_TYPE:
7566 return native_interpret_int (type, ptr, len);
7568 case REAL_TYPE:
7569 return native_interpret_real (type, ptr, len);
7571 case FIXED_POINT_TYPE:
7572 return native_interpret_fixed (type, ptr, len);
7574 case COMPLEX_TYPE:
7575 return native_interpret_complex (type, ptr, len);
7577 case VECTOR_TYPE:
7578 return native_interpret_vector (type, ptr, len);
7580 default:
7581 return NULL_TREE;
7585 /* Returns true if we can interpret the contents of a native encoding
7586 as TYPE. */
7588 static bool
7589 can_native_interpret_type_p (tree type)
7591 switch (TREE_CODE (type))
7593 case INTEGER_TYPE:
7594 case ENUMERAL_TYPE:
7595 case BOOLEAN_TYPE:
7596 case POINTER_TYPE:
7597 case REFERENCE_TYPE:
7598 case FIXED_POINT_TYPE:
7599 case REAL_TYPE:
7600 case COMPLEX_TYPE:
7601 case VECTOR_TYPE:
7602 return true;
7603 default:
7604 return false;
7608 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7609 TYPE at compile-time. If we're unable to perform the conversion
7610 return NULL_TREE. */
7612 static tree
7613 fold_view_convert_expr (tree type, tree expr)
7615 /* We support up to 512-bit values (for V8DFmode). */
7616 unsigned char buffer[64];
7617 int len;
7619 /* Check that the host and target are sane. */
7620 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7621 return NULL_TREE;
7623 len = native_encode_expr (expr, buffer, sizeof (buffer));
7624 if (len == 0)
7625 return NULL_TREE;
7627 return native_interpret_expr (type, buffer, len);
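/* Editor's note -- an illustrative sketch, not part of the original
   source; it assumes a 32-bit int and IEEE single-precision float.  */
#if 0
static tree
example_fold_view_convert (void)
{
  tree one = build_real (float_type_node, dconst1);
  /* Folds the bit cast VIEW_CONVERT_EXPR<int>(1.0f) to the
     INTEGER_CST 0x3f800000.  */
  return fold_view_convert_expr (integer_type_node, one);
}
#endif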
7630 /* Build an expression for the address of T. Folds away INDIRECT_REF
7631 to avoid confusing the gimplify process. */
7633 tree
7634 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7636 /* The size of the object is not relevant when talking about its address. */
7637 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7638 t = TREE_OPERAND (t, 0);
7640 if (TREE_CODE (t) == INDIRECT_REF)
7642 t = TREE_OPERAND (t, 0);
7644 if (TREE_TYPE (t) != ptrtype)
7645 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7647 else if (TREE_CODE (t) == MEM_REF
7648 && integer_zerop (TREE_OPERAND (t, 1)))
7649 return TREE_OPERAND (t, 0);
7650 else if (TREE_CODE (t) == MEM_REF
7651 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7652 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7653 TREE_OPERAND (t, 0),
7654 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7655 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7657 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7659 if (TREE_TYPE (t) != ptrtype)
7660 t = fold_convert_loc (loc, ptrtype, t);
7662 else
7663 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7665 return t;
7668 /* Build an expression for the address of T. */
7670 tree
7671 build_fold_addr_expr_loc (location_t loc, tree t)
7673 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7675 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
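/* Editor's note -- an illustrative sketch, not part of the original
   source: because INDIRECT_REF is folded away, taking the address of
   *P yields P again (possibly with a conversion to the new pointer
   type).  */
#if 0
static tree
example_addr_of_deref (tree p)
{
  tree deref = build_fold_indirect_ref (p);
  return build_fold_addr_expr (deref);  /* folds back to P */
}
#endif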
7678 static bool vec_cst_ctor_to_array (tree, tree *);
7680 /* Fold a unary expression of code CODE and type TYPE with operand
7681 OP0. Return the folded expression if folding is successful.
7682 Otherwise, return NULL_TREE. */
7684 tree
7685 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7687 tree tem;
7688 tree arg0;
7689 enum tree_code_class kind = TREE_CODE_CLASS (code);
7691 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7692 && TREE_CODE_LENGTH (code) == 1);
7694 arg0 = op0;
7695 if (arg0)
7697 if (CONVERT_EXPR_CODE_P (code)
7698 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7700 /* Don't use STRIP_NOPS, because signedness of argument type
7701 matters. */
7702 STRIP_SIGN_NOPS (arg0);
7704 else
7706 /* Strip any conversions that don't change the mode. This
7707 is safe for every expression, except for a comparison
7708 expression because its signedness is derived from its
7709 operands.
7711 Note that this is done as an internal manipulation within
7712 the constant folder, in order to find the simplest
7713 representation of the arguments so that their form can be
7714 studied. In any case, the appropriate type conversions
7715 should be put back in the tree that will get out of the
7716 constant folder. */
7717 STRIP_NOPS (arg0);
7721 if (TREE_CODE_CLASS (code) == tcc_unary)
7723 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7724 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7725 fold_build1_loc (loc, code, type,
7726 fold_convert_loc (loc, TREE_TYPE (op0),
7727 TREE_OPERAND (arg0, 1))));
7728 else if (TREE_CODE (arg0) == COND_EXPR)
7730 tree arg01 = TREE_OPERAND (arg0, 1);
7731 tree arg02 = TREE_OPERAND (arg0, 2);
7732 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7733 arg01 = fold_build1_loc (loc, code, type,
7734 fold_convert_loc (loc,
7735 TREE_TYPE (op0), arg01));
7736 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7737 arg02 = fold_build1_loc (loc, code, type,
7738 fold_convert_loc (loc,
7739 TREE_TYPE (op0), arg02));
7740 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7741 arg01, arg02);
7743 /* If this was a conversion, and all we did was to move into
7744 inside the COND_EXPR, bring it back out. But leave it if
7745 it is a conversion from integer to integer and the
7746 result precision is no wider than a word since such a
7747 conversion is cheap and may be optimized away by combine,
7748 while it couldn't if it were outside the COND_EXPR. Then return
7749 so we don't get into an infinite recursion loop taking the
7750 conversion out and then back in. */
7752 if ((CONVERT_EXPR_CODE_P (code)
7753 || code == NON_LVALUE_EXPR)
7754 && TREE_CODE (tem) == COND_EXPR
7755 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7756 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7757 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7758 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7759 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7760 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7761 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7762 && (INTEGRAL_TYPE_P
7763 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7764 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7765 || flag_syntax_only))
7766 tem = build1_loc (loc, code, type,
7767 build3 (COND_EXPR,
7768 TREE_TYPE (TREE_OPERAND
7769 (TREE_OPERAND (tem, 1), 0)),
7770 TREE_OPERAND (tem, 0),
7771 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7772 TREE_OPERAND (TREE_OPERAND (tem, 2),
7773 0)));
7774 return tem;
7778 switch (code)
7780 case PAREN_EXPR:
7781 /* Re-association barriers around constants and other re-association
7782 barriers can be removed. */
7783 if (CONSTANT_CLASS_P (op0)
7784 || TREE_CODE (op0) == PAREN_EXPR)
7785 return fold_convert_loc (loc, type, op0);
7786 return NULL_TREE;
7788 CASE_CONVERT:
7789 case FLOAT_EXPR:
7790 case FIX_TRUNC_EXPR:
7791 if (TREE_TYPE (op0) == type)
7792 return op0;
7794 if (COMPARISON_CLASS_P (op0))
7796 /* If we have (type) (a CMP b) and type is an integral type, return
7797 new expression involving the new type. Canonicalize
7798 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7799 non-integral type.
7800 Do not fold the result as that would not simplify further;
7801 folding it again would only result in recursion. */
7802 if (TREE_CODE (type) == BOOLEAN_TYPE)
7803 return build2_loc (loc, TREE_CODE (op0), type,
7804 TREE_OPERAND (op0, 0),
7805 TREE_OPERAND (op0, 1));
7806 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7807 && TREE_CODE (type) != VECTOR_TYPE)
7808 return build3_loc (loc, COND_EXPR, type, op0,
7809 constant_boolean_node (true, type),
7810 constant_boolean_node (false, type));
7813 /* Handle cases of two conversions in a row. */
7814 if (CONVERT_EXPR_P (op0))
7816 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7817 tree inter_type = TREE_TYPE (op0);
7818 int inside_int = INTEGRAL_TYPE_P (inside_type);
7819 int inside_ptr = POINTER_TYPE_P (inside_type);
7820 int inside_float = FLOAT_TYPE_P (inside_type);
7821 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7822 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7823 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7824 int inter_int = INTEGRAL_TYPE_P (inter_type);
7825 int inter_ptr = POINTER_TYPE_P (inter_type);
7826 int inter_float = FLOAT_TYPE_P (inter_type);
7827 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7828 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7829 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7830 int final_int = INTEGRAL_TYPE_P (type);
7831 int final_ptr = POINTER_TYPE_P (type);
7832 int final_float = FLOAT_TYPE_P (type);
7833 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7834 unsigned int final_prec = TYPE_PRECISION (type);
7835 int final_unsignedp = TYPE_UNSIGNED (type);
7837 /* Check for cases specific to UPC, involving pointer types. */
7838 if (final_ptr || inter_ptr || inside_ptr)
7840 int final_pts = final_ptr
7841 && upc_shared_type_p (TREE_TYPE (type));
7842 int inter_pts = inter_ptr
7843 && upc_shared_type_p (TREE_TYPE (inter_type));
7844 int inside_pts = inside_ptr
7845 && upc_shared_type_p (TREE_TYPE (inside_type));
7846 if (final_pts || inter_pts || inside_pts)
7848 if (!(((final_pts && inter_pts)
7849 && TREE_TYPE (type) == TREE_TYPE (inter_type))
7850 || ((inter_pts && inside_pts)
7851 && (TREE_TYPE (inter_type)
7852 == TREE_TYPE (inside_type)))))
7853 return NULL;
7857 /* In addition to the cases of two conversions in a row
7858 handled below, if we are converting something to its own
7859 type via an object of identical or wider precision, neither
7860 conversion is needed. */
7861 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7862 && (((inter_int || inter_ptr) && final_int)
7863 || (inter_float && final_float))
7864 && inter_prec >= final_prec)
7865 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7867 /* Likewise, if the intermediate and initial types are either both
7868 float or both integer, we don't need the middle conversion if the
7869 former is wider than the latter and doesn't change the signedness
7870 (for integers). Avoid this if the final type is a pointer since
7871 then we sometimes need the middle conversion. Likewise if the
7872 final type has a precision not equal to the size of its mode. */
7873 if (((inter_int && inside_int)
7874 || (inter_float && inside_float)
7875 || (inter_vec && inside_vec))
7876 && inter_prec >= inside_prec
7877 && (inter_float || inter_vec
7878 || inter_unsignedp == inside_unsignedp)
7879 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7880 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7881 && ! final_ptr
7882 && (! final_vec || inter_prec == inside_prec))
7883 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7885 /* If we have a sign-extension of a zero-extended value, we can
7886 replace that by a single zero-extension. Likewise if the
7887 final conversion does not change precision we can drop the
7888 intermediate conversion. */
7889 if (inside_int && inter_int && final_int
7890 && ((inside_prec < inter_prec && inter_prec < final_prec
7891 && inside_unsignedp && !inter_unsignedp)
7892 || final_prec == inter_prec))
7893 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7895 /* Two conversions in a row are not needed unless:
7896 - some conversion is floating-point (overstrict for now), or
7897 - some conversion is a vector (overstrict for now), or
7898 - the intermediate type is narrower than both initial and
7899 final, or
7900 - the intermediate type and innermost type differ in signedness,
7901 and the outermost type is wider than the intermediate, or
7902 - the initial type is a pointer type and the precisions of the
7903 intermediate and final types differ, or
7904 - the final type is a pointer type and the precisions of the
7905 initial and intermediate types differ. */
7906 if (! inside_float && ! inter_float && ! final_float
7907 && ! inside_vec && ! inter_vec && ! final_vec
7908 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7909 && ! (inside_int && inter_int
7910 && inter_unsignedp != inside_unsignedp
7911 && inter_prec < final_prec)
7912 && ((inter_unsignedp && inter_prec > inside_prec)
7913 == (final_unsignedp && final_prec > inter_prec))
7914 && ! (inside_ptr && inter_prec != final_prec)
7915 && ! (final_ptr && inside_prec != inter_prec)
7916 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7917 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7918 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7921 /* Handle (T *)&A.B.C for A being of type T and B and C
7922 living at offset zero. This occurs frequently in
7923 C++ upcasting and then accessing the base. */
7924 if (TREE_CODE (op0) == ADDR_EXPR
7925 && POINTER_TYPE_P (type)
7926 && handled_component_p (TREE_OPERAND (op0, 0)))
7928 HOST_WIDE_INT bitsize, bitpos;
7929 tree offset;
7930 enum machine_mode mode;
7931 int unsignedp, volatilep;
7932 tree base = TREE_OPERAND (op0, 0);
7933 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7934 &mode, &unsignedp, &volatilep, false);
7935 /* If the reference was to a (constant) zero offset, we can use
7936 the address of the base if it has the same base type
7937 as the result type and the pointer type is unqualified. */
7938 if (! offset && bitpos == 0
7939 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7940 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7941 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7942 return fold_convert_loc (loc, type,
7943 build_fold_addr_expr_loc (loc, base));
7946 if (TREE_CODE (op0) == MODIFY_EXPR
7947 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7948 /* Detect assigning a bitfield. */
7949 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7950 && DECL_BIT_FIELD
7951 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7953 /* Don't leave an assignment inside a conversion
7954 unless assigning a bitfield. */
7955 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7956 /* First do the assignment, then return converted constant. */
7957 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7958 TREE_NO_WARNING (tem) = 1;
7959 TREE_USED (tem) = 1;
7960 return tem;
7963 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7964 constant (if x has signed type, the sign bit cannot be set
7965 in c). This folds extension into the BIT_AND_EXPR.
7966 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7967 very likely don't have maximal range for their precision and this
7968 transformation effectively doesn't preserve non-maximal ranges. */
7969 if (TREE_CODE (type) == INTEGER_TYPE
7970 && TREE_CODE (op0) == BIT_AND_EXPR
7971 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7973 tree and_expr = op0;
7974 tree and0 = TREE_OPERAND (and_expr, 0);
7975 tree and1 = TREE_OPERAND (and_expr, 1);
7976 int change = 0;
7978 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7979 || (TYPE_PRECISION (type)
7980 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7981 change = 1;
7982 else if (TYPE_PRECISION (TREE_TYPE (and1))
7983 <= HOST_BITS_PER_WIDE_INT
7984 && host_integerp (and1, 1))
7986 unsigned HOST_WIDE_INT cst;
7988 cst = tree_low_cst (and1, 1);
7989 cst &= (HOST_WIDE_INT) -1
7990 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7991 change = (cst == 0);
7992 #ifdef LOAD_EXTEND_OP
7993 if (change
7994 && !flag_syntax_only
7995 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7996 == ZERO_EXTEND))
7998 tree uns = unsigned_type_for (TREE_TYPE (and0));
7999 and0 = fold_convert_loc (loc, uns, and0);
8000 and1 = fold_convert_loc (loc, uns, and1);
8002 #endif
8004 if (change)
8006 tem = force_fit_type_double (type, tree_to_double_int (and1),
8007 0, TREE_OVERFLOW (and1));
8008 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8009 fold_convert_loc (loc, type, and0), tem);
8013 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8014 when one of the new casts will fold away. Conservatively we assume
8015 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8016 if (POINTER_TYPE_P (type)
8017 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8018 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8019 && !upc_shared_type_p (TREE_TYPE (type))
8020 && !upc_shared_type_p (TREE_TYPE (
8021 TREE_TYPE (TREE_OPERAND (arg0, 0))))
8022 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8023 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8024 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8026 tree arg00 = TREE_OPERAND (arg0, 0);
8027 tree arg01 = TREE_OPERAND (arg0, 1);
8029 return fold_build_pointer_plus_loc
8030 (loc, fold_convert_loc (loc, type, arg00), arg01);
8033 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8034 of the same precision, and X is an integer type not narrower than
8035 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8036 if (INTEGRAL_TYPE_P (type)
8037 && TREE_CODE (op0) == BIT_NOT_EXPR
8038 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8039 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8040 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8042 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8043 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8044 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8045 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8046 fold_convert_loc (loc, type, tem));
8049 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8050 type of X and Y (integer types only). */
8051 if (INTEGRAL_TYPE_P (type)
8052 && TREE_CODE (op0) == MULT_EXPR
8053 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8054 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8056 /* Be careful not to introduce new overflows. */
8057 tree mult_type;
8058 if (TYPE_OVERFLOW_WRAPS (type))
8059 mult_type = type;
8060 else
8061 mult_type = unsigned_type_for (type);
8063 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8065 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8066 fold_convert_loc (loc, mult_type,
8067 TREE_OPERAND (op0, 0)),
8068 fold_convert_loc (loc, mult_type,
8069 TREE_OPERAND (op0, 1)));
8070 return fold_convert_loc (loc, type, tem);
8074 tem = fold_convert_const (code, type, op0);
8075 return tem ? tem : NULL_TREE;
8077 case ADDR_SPACE_CONVERT_EXPR:
8078 if (integer_zerop (arg0))
8079 return fold_convert_const (code, type, arg0);
8080 return NULL_TREE;
8082 case FIXED_CONVERT_EXPR:
8083 tem = fold_convert_const (code, type, arg0);
8084 return tem ? tem : NULL_TREE;
8086 case VIEW_CONVERT_EXPR:
8087 if (TREE_TYPE (op0) == type)
8088 return op0;
8089 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8090 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8091 type, TREE_OPERAND (op0, 0));
8092 if (TREE_CODE (op0) == MEM_REF)
8093 return fold_build2_loc (loc, MEM_REF, type,
8094 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8096 /* For integral conversions with the same precision or pointer
8097 conversions use a NOP_EXPR instead. */
8098 if ((INTEGRAL_TYPE_P (type)
8099 || (POINTER_TYPE_P (type)
8100 && !upc_shared_type_p (TREE_TYPE (type))))
8101 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8102 || (POINTER_TYPE_P (TREE_TYPE (op0))
8103 && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
8104 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8105 return fold_convert_loc (loc, type, op0);
8107 /* Strip inner integral conversions that do not change the precision. */
8108 if (CONVERT_EXPR_P (op0)
8109 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8110 || (POINTER_TYPE_P (TREE_TYPE (op0))
8111 && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
8112 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8113 || (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8114 && !upc_shared_type_p (TREE_TYPE (
8115 TREE_TYPE (
8116 TREE_OPERAND (op0, 0))))))
8117 && (TYPE_PRECISION (TREE_TYPE (op0))
8118 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8119 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8120 type, TREE_OPERAND (op0, 0));
8122 return fold_view_convert_expr (type, op0);
8124 case NEGATE_EXPR:
8125 tem = fold_negate_expr (loc, arg0);
8126 if (tem)
8127 return fold_convert_loc (loc, type, tem);
8128 return NULL_TREE;
8130 case ABS_EXPR:
8131 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8132 return fold_abs_const (arg0, type);
8133 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8134 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8135 /* Convert fabs((double)float) into (double)fabsf(float). */
8136 else if (TREE_CODE (arg0) == NOP_EXPR
8137 && TREE_CODE (type) == REAL_TYPE)
8139 tree targ0 = strip_float_extensions (arg0);
8140 if (targ0 != arg0)
8141 return fold_convert_loc (loc, type,
8142 fold_build1_loc (loc, ABS_EXPR,
8143 TREE_TYPE (targ0),
8144 targ0));
8146 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8147 else if (TREE_CODE (arg0) == ABS_EXPR)
8148 return arg0;
8149 else if (tree_expr_nonnegative_p (arg0))
8150 return arg0;
8152 /* Strip sign ops from argument. */
8153 if (TREE_CODE (type) == REAL_TYPE)
8155 tem = fold_strip_sign_ops (arg0);
8156 if (tem)
8157 return fold_build1_loc (loc, ABS_EXPR, type,
8158 fold_convert_loc (loc, type, tem));
8160 return NULL_TREE;
8162 case CONJ_EXPR:
8163 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8164 return fold_convert_loc (loc, type, arg0);
8165 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8167 tree itype = TREE_TYPE (type);
8168 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8169 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8170 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8171 negate_expr (ipart));
8173 if (TREE_CODE (arg0) == COMPLEX_CST)
8175 tree itype = TREE_TYPE (type);
8176 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8177 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8178 return build_complex (type, rpart, negate_expr (ipart));
8180 if (TREE_CODE (arg0) == CONJ_EXPR)
8181 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8182 return NULL_TREE;
8184 case BIT_NOT_EXPR:
8185 if (TREE_CODE (arg0) == INTEGER_CST)
8186 return fold_not_const (arg0, type);
8187 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8188 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8189 /* Convert ~ (-A) to A - 1. */
8190 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8191 return fold_build2_loc (loc, MINUS_EXPR, type,
8192 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8193 build_int_cst (type, 1));
8194 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8195 else if (INTEGRAL_TYPE_P (type)
8196 && ((TREE_CODE (arg0) == MINUS_EXPR
8197 && integer_onep (TREE_OPERAND (arg0, 1)))
8198 || (TREE_CODE (arg0) == PLUS_EXPR
8199 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8200 return fold_build1_loc (loc, NEGATE_EXPR, type,
8201 fold_convert_loc (loc, type,
8202 TREE_OPERAND (arg0, 0)));
8203 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8204 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8205 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8206 fold_convert_loc (loc, type,
8207 TREE_OPERAND (arg0, 0)))))
8208 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8209 fold_convert_loc (loc, type,
8210 TREE_OPERAND (arg0, 1)));
8211 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8212 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8213 fold_convert_loc (loc, type,
8214 TREE_OPERAND (arg0, 1)))))
8215 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8216 fold_convert_loc (loc, type,
8217 TREE_OPERAND (arg0, 0)), tem);
8218 /* Perform BIT_NOT_EXPR on each element individually. */
8219 else if (TREE_CODE (arg0) == VECTOR_CST)
8221 tree *elements;
8222 tree elem;
8223 unsigned count = VECTOR_CST_NELTS (arg0), i;
8225 elements = XALLOCAVEC (tree, count);
8226 for (i = 0; i < count; i++)
8228 elem = VECTOR_CST_ELT (arg0, i);
8229 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8230 if (elem == NULL_TREE)
8231 break;
8232 elements[i] = elem;
8234 if (i == count)
8235 return build_vector (type, elements);
8238 return NULL_TREE;
8240 case TRUTH_NOT_EXPR:
8241 /* The argument to invert_truthvalue must have Boolean type. */
8242 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8243 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8245 /* Note that the operand of this must be an int
8246 and its values must be 0 or 1.
8247 ("true" is a fixed value perhaps depending on the language,
8248 but we don't handle values other than 1 correctly yet.) */
8249 tem = fold_truth_not_expr (loc, arg0);
8250 if (!tem)
8251 return NULL_TREE;
8252 return fold_convert_loc (loc, type, tem);
8254 case REALPART_EXPR:
8255 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8256 return fold_convert_loc (loc, type, arg0);
8257 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8258 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8259 TREE_OPERAND (arg0, 1));
8260 if (TREE_CODE (arg0) == COMPLEX_CST)
8261 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8262 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8264 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8265 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8266 fold_build1_loc (loc, REALPART_EXPR, itype,
8267 TREE_OPERAND (arg0, 0)),
8268 fold_build1_loc (loc, REALPART_EXPR, itype,
8269 TREE_OPERAND (arg0, 1)));
8270 return fold_convert_loc (loc, type, tem);
8272 if (TREE_CODE (arg0) == CONJ_EXPR)
8274 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8275 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8276 TREE_OPERAND (arg0, 0));
8277 return fold_convert_loc (loc, type, tem);
8279 if (TREE_CODE (arg0) == CALL_EXPR)
8281 tree fn = get_callee_fndecl (arg0);
8282 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8283 switch (DECL_FUNCTION_CODE (fn))
8285 CASE_FLT_FN (BUILT_IN_CEXPI):
8286 fn = mathfn_built_in (type, BUILT_IN_COS);
8287 if (fn)
8288 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8289 break;
8291 default:
8292 break;
8295 return NULL_TREE;
8297 case IMAGPART_EXPR:
8298 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8299 return build_zero_cst (type);
8300 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8301 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8302 TREE_OPERAND (arg0, 0));
8303 if (TREE_CODE (arg0) == COMPLEX_CST)
8304 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8305 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8307 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8308 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8309 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8310 TREE_OPERAND (arg0, 0)),
8311 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8312 TREE_OPERAND (arg0, 1)));
8313 return fold_convert_loc (loc, type, tem);
8315 if (TREE_CODE (arg0) == CONJ_EXPR)
8317 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8318 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8319 return fold_convert_loc (loc, type, negate_expr (tem));
8321 if (TREE_CODE (arg0) == CALL_EXPR)
8323 tree fn = get_callee_fndecl (arg0);
8324 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8325 switch (DECL_FUNCTION_CODE (fn))
8327 CASE_FLT_FN (BUILT_IN_CEXPI):
8328 fn = mathfn_built_in (type, BUILT_IN_SIN);
8329 if (fn)
8330 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8331 break;
8333 default:
8334 break;
8337 return NULL_TREE;
8339 case INDIRECT_REF:
8340 /* Fold *&X to X if X is an lvalue. */
8341 if (TREE_CODE (op0) == ADDR_EXPR)
8343 tree op00 = TREE_OPERAND (op0, 0);
8344 if ((TREE_CODE (op00) == VAR_DECL
8345 || TREE_CODE (op00) == PARM_DECL
8346 || TREE_CODE (op00) == RESULT_DECL)
8347 && !TREE_READONLY (op00))
8348 return op00;
8350 return NULL_TREE;
8352 case VEC_UNPACK_LO_EXPR:
8353 case VEC_UNPACK_HI_EXPR:
8354 case VEC_UNPACK_FLOAT_LO_EXPR:
8355 case VEC_UNPACK_FLOAT_HI_EXPR:
8357 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8358 tree *elts;
8359 enum tree_code subcode;
8361 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8362 if (TREE_CODE (arg0) != VECTOR_CST)
8363 return NULL_TREE;
8365 elts = XALLOCAVEC (tree, nelts * 2);
8366 if (!vec_cst_ctor_to_array (arg0, elts))
8367 return NULL_TREE;
8369 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8370 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8371 elts += nelts;
8373 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8374 subcode = NOP_EXPR;
8375 else
8376 subcode = FLOAT_EXPR;
8378 for (i = 0; i < nelts; i++)
8380 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8381 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8382 return NULL_TREE;
8385 return build_vector (type, elts);
8388 case REDUC_MIN_EXPR:
8389 case REDUC_MAX_EXPR:
8390 case REDUC_PLUS_EXPR:
8392 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8393 tree *elts;
8394 enum tree_code subcode;
8396 if (TREE_CODE (op0) != VECTOR_CST)
8397 return NULL_TREE;
8399 elts = XALLOCAVEC (tree, nelts);
8400 if (!vec_cst_ctor_to_array (op0, elts))
8401 return NULL_TREE;
8403 switch (code)
8405 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8406 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8407 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8408 default: gcc_unreachable ();
8411 for (i = 1; i < nelts; i++)
8413 elts[0] = const_binop (subcode, elts[0], elts[i]);
8414 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8415 return NULL_TREE;
8416 elts[i] = build_zero_cst (TREE_TYPE (type));
8419 return build_vector (type, elts);
8422 default:
8423 return NULL_TREE;
8424 } /* switch (code) */
8428 /* If the operation was a conversion do _not_ mark a resulting constant
8429 with TREE_OVERFLOW if the original constant was not. These conversions
8430 have implementation defined behavior and retaining the TREE_OVERFLOW
8431 flag here would confuse later passes such as VRP. */
8432 tree
8433 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8434 tree type, tree op0)
8436 tree res = fold_unary_loc (loc, code, type, op0);
8437 if (res
8438 && TREE_CODE (res) == INTEGER_CST
8439 && TREE_CODE (op0) == INTEGER_CST
8440 && CONVERT_EXPR_CODE_P (code))
8441 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8443 return res;
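/* Editor's note -- an illustrative sketch, not part of the original
   source: narrowing an INTEGER_CST through this wrapper keeps the
   operand's TREE_OVERFLOW setting instead of the one the conversion
   itself would introduce.  */
#if 0
static tree
example_ignore_overflow (location_t loc, tree wide_cst)
{
  tree res = fold_unary_ignore_overflow_loc (loc, NOP_EXPR,
                                             short_integer_type_node,
                                             wide_cst);
  /* TREE_OVERFLOW (res) mirrors TREE_OVERFLOW (wide_cst).  */
  return res;
}
#endif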
8446 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8447 operands OP0 and OP1. LOC is the location of the resulting expression.
8448 ARG0 and ARG1 are OP0 and OP1 with NOPs stripped.
8449 Return the folded expression if folding is successful. Otherwise,
8450 return NULL_TREE. */
8451 static tree
8452 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8453 tree arg0, tree arg1, tree op0, tree op1)
8455 tree tem;
8457 /* We only do these simplifications if we are optimizing. */
8458 if (!optimize)
8459 return NULL_TREE;
8461 /* Check for things like (A || B) && (A || C). We can convert this
8462 to A || (B && C). Note that either operator can be any of the four
8463 truth and/or operations and the transformation will still be
8464 valid. Also note that we only care about order for the
8465 ANDIF and ORIF operators. If B contains side effects, this
8466 might change the truth-value of A. */
8467 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8468 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8469 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8470 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8471 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8472 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8474 tree a00 = TREE_OPERAND (arg0, 0);
8475 tree a01 = TREE_OPERAND (arg0, 1);
8476 tree a10 = TREE_OPERAND (arg1, 0);
8477 tree a11 = TREE_OPERAND (arg1, 1);
8478 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8479 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8480 && (code == TRUTH_AND_EXPR
8481 || code == TRUTH_OR_EXPR));
8483 if (operand_equal_p (a00, a10, 0))
8484 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8485 fold_build2_loc (loc, code, type, a01, a11));
8486 else if (commutative && operand_equal_p (a00, a11, 0))
8487 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8488 fold_build2_loc (loc, code, type, a01, a10));
8489 else if (commutative && operand_equal_p (a01, a10, 0))
8490 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8491 fold_build2_loc (loc, code, type, a00, a11));
8493 /* This case is tricky because we must either have commutative
8494 operators or else A10 must not have side-effects. */
8496 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8497 && operand_equal_p (a01, a11, 0))
8498 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8499 fold_build2_loc (loc, code, type, a00, a10),
8500 a01);
8503 /* See if we can build a range comparison. */
8504 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8505 return tem;
8507 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8508 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8510 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8511 if (tem)
8512 return fold_build2_loc (loc, code, type, tem, arg1);
8515 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8516 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8518 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8519 if (tem)
8520 return fold_build2_loc (loc, code, type, arg0, tem);
8523 /* Check for the possibility of merging component references. If our
8524 lhs is another similar operation, try to merge its rhs with our
8525 rhs. Then try to merge our lhs and rhs. */
8526 if (TREE_CODE (arg0) == code
8527 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8528 TREE_OPERAND (arg0, 1), arg1)))
8529 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8531 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8532 return tem;
8534 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8535 && (code == TRUTH_AND_EXPR
8536 || code == TRUTH_ANDIF_EXPR
8537 || code == TRUTH_OR_EXPR
8538 || code == TRUTH_ORIF_EXPR))
8540 enum tree_code ncode, icode;
8542 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8543 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8544 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8546 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8547 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8548 We don't want to pack more than two leaves into a non-IF AND/OR
8549 expression.
8550 If the tree code of the left-hand operand isn't an AND/OR-IF code
8551 and isn't equal to IF-CODE, we don't want to add the right-hand operand.
8552 If the inner right-hand side of the left-hand operand has
8553 side effects, or isn't simple, then we can't add to it,
8554 as otherwise we might destroy the if-sequence. */
8555 if (TREE_CODE (arg0) == icode
8556 && simple_operand_p_2 (arg1)
8557 /* Needed for sequence points, to handle trapping and
8558 side effects. */
8559 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8561 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8562 arg1);
8563 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8564 tem);
8566 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8567 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8568 else if (TREE_CODE (arg1) == icode
8569 && simple_operand_p_2 (arg0)
8570 /* Needed for sequence points, to handle trapping and
8571 side effects. */
8572 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8574 tem = fold_build2_loc (loc, ncode, type,
8575 arg0, TREE_OPERAND (arg1, 0));
8576 return fold_build2_loc (loc, icode, type, tem,
8577 TREE_OPERAND (arg1, 1));
8579 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8580 into (A OR B).
8581 For sequence point consistency, we need to check for trapping,
8582 and side-effects. */
8583 else if (code == icode && simple_operand_p_2 (arg0)
8584 && simple_operand_p_2 (arg1))
8585 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8588 return NULL_TREE;
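/* Editor's note -- an illustrative sketch, not part of the original
   source: for side-effect-free A, B and C, the first transformation
   above turns (A || B) && (A || C) into A || (B && C) when
   optimizing.  */
#if 0
static tree
example_truth_andor (location_t loc, tree a, tree b, tree c)
{
  tree t1 = fold_build2_loc (loc, TRUTH_ORIF_EXPR, boolean_type_node, a, b);
  tree t2 = fold_build2_loc (loc, TRUTH_ORIF_EXPR, boolean_type_node, a, c);
  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, boolean_type_node, t1, t2);
}
#endif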
8591 /* Fold a binary expression of code CODE and type TYPE with operands
8592 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8593 Return the folded expression if folding is successful. Otherwise,
8594 return NULL_TREE. */
8596 static tree
8597 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8599 enum tree_code compl_code;
8601 if (code == MIN_EXPR)
8602 compl_code = MAX_EXPR;
8603 else if (code == MAX_EXPR)
8604 compl_code = MIN_EXPR;
8605 else
8606 gcc_unreachable ();
8608 /* MIN (MAX (a, b), b) == b. */
8609 if (TREE_CODE (op0) == compl_code
8610 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8611 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8613 /* MIN (MAX (b, a), b) == b. */
8614 if (TREE_CODE (op0) == compl_code
8615 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8616 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8617 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8619 /* MIN (a, MAX (a, b)) == a. */
8620 if (TREE_CODE (op1) == compl_code
8621 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8622 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8623 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8625 /* MIN (a, MAX (b, a)) == a. */
8626 if (TREE_CODE (op1) == compl_code
8627 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8628 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8629 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8631 return NULL_TREE;
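/* Editor's note -- an illustrative sketch, not part of the original
   source, showing the first identity above.  */
#if 0
static tree
example_fold_minmax (location_t loc, tree type, tree a, tree b)
{
  tree mx = fold_build2_loc (loc, MAX_EXPR, type, a, b);
  /* MIN (MAX (a, b), b) simplifies to b; a is kept only for its
     side effects via omit_one_operand.  */
  return fold_build2_loc (loc, MIN_EXPR, type, mx, b);
}
#endif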
8634 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8635 by changing CODE to reduce the magnitude of constants involved in
8636 ARG0 of the comparison.
8637 Returns a canonicalized comparison tree if a simplification was
8638 possible, otherwise returns NULL_TREE.
8639 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8640 valid if signed overflow is undefined. */
8642 static tree
8643 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8644 tree arg0, tree arg1,
8645 bool *strict_overflow_p)
8647 enum tree_code code0 = TREE_CODE (arg0);
8648 tree t, cst0 = NULL_TREE;
8649 int sgn0;
8650 bool swap = false;
8652 /* Match A +- CST code arg1 and CST code arg1. We can change the
8653 first form only if overflow is undefined. */
8654 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8655 /* In principle pointers also have undefined overflow behavior,
8656 but that causes problems elsewhere. */
8657 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8658 && (code0 == MINUS_EXPR
8659 || code0 == PLUS_EXPR)
8660 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8661 || code0 == INTEGER_CST))
8662 return NULL_TREE;
8664 /* Identify the constant in arg0 and its sign. */
8665 if (code0 == INTEGER_CST)
8666 cst0 = arg0;
8667 else
8668 cst0 = TREE_OPERAND (arg0, 1);
8669 sgn0 = tree_int_cst_sgn (cst0);
8671 /* Overflowed constants and zero will cause problems. */
8672 if (integer_zerop (cst0)
8673 || TREE_OVERFLOW (cst0))
8674 return NULL_TREE;
8676 /* See if we can reduce the magnitude of the constant in
8677 arg0 by changing the comparison code. */
8678 if (code0 == INTEGER_CST)
8680 /* CST <= arg1 -> CST-1 < arg1. */
8681 if (code == LE_EXPR && sgn0 == 1)
8682 code = LT_EXPR;
8683 /* -CST < arg1 -> -CST-1 <= arg1. */
8684 else if (code == LT_EXPR && sgn0 == -1)
8685 code = LE_EXPR;
8686 /* CST > arg1 -> CST-1 >= arg1. */
8687 else if (code == GT_EXPR && sgn0 == 1)
8688 code = GE_EXPR;
8689 /* -CST >= arg1 -> -CST-1 > arg1. */
8690 else if (code == GE_EXPR && sgn0 == -1)
8691 code = GT_EXPR;
8692 else
8693 return NULL_TREE;
8694 /* arg1 code' CST' might be more canonical. */
8695 swap = true;
8697 else
8699 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8700 if (code == LT_EXPR
8701 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8702 code = LE_EXPR;
8703 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8704 else if (code == GT_EXPR
8705 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8706 code = GE_EXPR;
8707 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8708 else if (code == LE_EXPR
8709 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8710 code = LT_EXPR;
8711 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8712 else if (code == GE_EXPR
8713 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8714 code = GT_EXPR;
8715 else
8716 return NULL_TREE;
8717 *strict_overflow_p = true;
8720 /* Now build the constant reduced in magnitude. But not if that
8721 would produce one outside of its type's range. */
8722 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8723 && ((sgn0 == 1
8724 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8725 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8726 || (sgn0 == -1
8727 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8728 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8729 /* We cannot swap the comparison here as that would cause us to
8730 endlessly recurse. */
8731 return NULL_TREE;
8733 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8734 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8735 if (code0 != INTEGER_CST)
8736 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8737 t = fold_convert (TREE_TYPE (arg1), t);
8739 /* If swapping might yield a more canonical form, do so. */
8740 if (swap)
8741 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8742 else
8743 return fold_build2_loc (loc, code, type, t, arg1);
8746 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8747 overflow further. Try to decrease the magnitude of constants involved
8748 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8749 and put sole constants at the second argument position.
8750 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8752 static tree
8753 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8754 tree arg0, tree arg1)
8756 tree t;
8757 bool strict_overflow_p;
8758 const char * const warnmsg = G_("assuming signed overflow does not occur "
8759 "when reducing constant in comparison");
8761 /* Try canonicalization by simplifying arg0. */
8762 strict_overflow_p = false;
8763 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8764 &strict_overflow_p);
8765 if (t)
8767 if (strict_overflow_p)
8768 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8769 return t;
8772 /* Try canonicalization by simplifying arg1 using the swapped
8773 comparison. */
8774 code = swap_tree_comparison (code);
8775 strict_overflow_p = false;
8776 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8777 &strict_overflow_p);
8778 if (t && strict_overflow_p)
8779 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8780 return t;
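/* Editor's note -- an illustrative sketch, not part of the original
   source: assuming signed overflow is undefined, x + 2 <= y is
   canonicalized to x + 1 < y, reducing the constant's magnitude.  */
#if 0
static tree
example_canonicalize_cmp (location_t loc, tree x, tree y)
{
  tree sum = fold_build2_loc (loc, PLUS_EXPR, integer_type_node, x,
                              build_int_cst (integer_type_node, 2));
  return fold_build2_loc (loc, LE_EXPR, boolean_type_node, sum, y);
}
#endif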
8783 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8784 space. This is used to avoid issuing overflow warnings for
8785 expressions like &p->x which cannot wrap. */
8787 static bool
8788 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8790 double_int di_offset, total;
8792 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8793 return true;
8795 if (bitpos < 0)
8796 return true;
8798 if (offset == NULL_TREE)
8799 di_offset = double_int_zero;
8800 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8801 return true;
8802 else
8803 di_offset = TREE_INT_CST (offset);
8805 bool overflow;
8806 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8807 total = di_offset.add_with_sign (units, true, &overflow);
8808 if (overflow)
8809 return true;
8811 if (total.high != 0)
8812 return true;
8814 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8815 if (size <= 0)
8816 return true;
8818 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8819 array. */
8820 if (TREE_CODE (base) == ADDR_EXPR)
8822 HOST_WIDE_INT base_size;
8824 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8825 if (base_size > 0 && size < base_size)
8826 size = base_size;
8829 return total.low > (unsigned HOST_WIDE_INT) size;
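/* Editor's note -- an illustrative sketch, not part of the original
   source: for  struct S { char a[8]; } *p;  the address &p->a[4] has
   BASE = p, no variable offset and BITPOS = 4 * BITS_PER_UNIT; since
   byte 4 lies inside the pointed-to object, pointer_may_wrap_p
   returns false and no overflow warning is issued.  */
#if 0
static bool
example_field_address_cannot_wrap (tree p)
{
  return !pointer_may_wrap_p (p, NULL_TREE, 4 * BITS_PER_UNIT);
}
#endif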
8832 /* Subroutine of fold_binary. This routine performs all of the
8833 transformations that are common to the equality/inequality
8834 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8835 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Do not call this
8836 routine directly; call fold_binary instead. Fold a comparison with
8837 tree code CODE and type TYPE with operands OP0 and OP1. Return
8838 the folded comparison or NULL_TREE. */
8840 static tree
8841 fold_comparison (location_t loc, enum tree_code code, tree type,
8842 tree op0, tree op1)
8844 tree arg0, arg1, tem;
8846 arg0 = op0;
8847 arg1 = op1;
8849 STRIP_SIGN_NOPS (arg0);
8850 STRIP_SIGN_NOPS (arg1);
8852 tem = fold_relational_const (code, type, arg0, arg1);
8853 if (tem != NULL_TREE)
8854 return tem;
8856 /* If one arg is a real or integer constant, put it last. */
8857 if (tree_swap_operands_p (arg0, arg1, true))
8858 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8860 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8861 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8862 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8863 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8864 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8865 && (TREE_CODE (arg1) == INTEGER_CST
8866 && !TREE_OVERFLOW (arg1)))
8868 tree const1 = TREE_OPERAND (arg0, 1);
8869 tree const2 = arg1;
8870 tree variable = TREE_OPERAND (arg0, 0);
8871 tree lhs;
8872 int lhs_add;
8873 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8875 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8876 TREE_TYPE (arg1), const2, const1);
8878 /* If the constant operation overflowed this can be
8879 simplified as a comparison against INT_MAX/INT_MIN. */
8880 if (TREE_CODE (lhs) == INTEGER_CST
8881 && TREE_OVERFLOW (lhs))
8883 int const1_sgn = tree_int_cst_sgn (const1);
8884 enum tree_code code2 = code;
8886 /* Get the sign of the constant on the lhs if the
8887 operation were VARIABLE + CONST1. */
8888 if (TREE_CODE (arg0) == MINUS_EXPR)
8889 const1_sgn = -const1_sgn;
8891 /* The sign of the constant determines if we overflowed
8892 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8893 Canonicalize to the INT_MIN overflow by swapping the comparison
8894 if necessary. */
8895 if (const1_sgn == -1)
8896 code2 = swap_tree_comparison (code);
8898 /* We now can look at the canonicalized case
8899 VARIABLE + 1 CODE2 INT_MIN
8900 and decide on the result. */
8901 if (code2 == LT_EXPR
8902 || code2 == LE_EXPR
8903 || code2 == EQ_EXPR)
8904 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8905 else if (code2 == NE_EXPR
8906 || code2 == GE_EXPR
8907 || code2 == GT_EXPR)
8908 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8911 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8912 && (TREE_CODE (lhs) != INTEGER_CST
8913 || !TREE_OVERFLOW (lhs)))
8915 if (code != EQ_EXPR && code != NE_EXPR)
8916 fold_overflow_warning ("assuming signed overflow does not occur "
8917 "when changing X +- C1 cmp C2 to "
8918 "X cmp C1 +- C2",
8919 WARN_STRICT_OVERFLOW_COMPARISON);
8920 return fold_build2_loc (loc, code, type, variable, lhs);
8924 /* For comparisons of pointers we can decompose it to a compile time
8925 comparison of the base objects and the offsets into the object.
8926 This requires at least one operand being an ADDR_EXPR or a
8927 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8928 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8929 && (TREE_CODE (arg0) == ADDR_EXPR
8930 || TREE_CODE (arg1) == ADDR_EXPR
8931 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8932 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8934 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8935 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8936 enum machine_mode mode;
8937 int volatilep, unsignedp;
8938 bool indirect_base0 = false, indirect_base1 = false;
8940 /* Get base and offset for the access. Strip ADDR_EXPR for
8941 get_inner_reference, but put it back by stripping INDIRECT_REF
8942 off the base object if possible. indirect_baseN will be true
8943 if baseN is not an address but refers to the object itself. */
8944 base0 = arg0;
8945 if (TREE_CODE (arg0) == ADDR_EXPR)
8947 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8948 &bitsize, &bitpos0, &offset0, &mode,
8949 &unsignedp, &volatilep, false);
8950 if (TREE_CODE (base0) == INDIRECT_REF)
8951 base0 = TREE_OPERAND (base0, 0);
8952 else
8953 indirect_base0 = true;
8955 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8957 base0 = TREE_OPERAND (arg0, 0);
8958 STRIP_SIGN_NOPS (base0);
8959 if (TREE_CODE (base0) == ADDR_EXPR)
8961 base0 = TREE_OPERAND (base0, 0);
8962 indirect_base0 = true;
8964 offset0 = TREE_OPERAND (arg0, 1);
8965 if (host_integerp (offset0, 0))
8967 HOST_WIDE_INT off = size_low_cst (offset0);
8968 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8969 * BITS_PER_UNIT)
8970 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8972 bitpos0 = off * BITS_PER_UNIT;
8973 offset0 = NULL_TREE;
8978 base1 = arg1;
8979 if (TREE_CODE (arg1) == ADDR_EXPR)
8981 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8982 &bitsize, &bitpos1, &offset1, &mode,
8983 &unsignedp, &volatilep, false);
8984 if (TREE_CODE (base1) == INDIRECT_REF)
8985 base1 = TREE_OPERAND (base1, 0);
8986 else
8987 indirect_base1 = true;
8989 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8991 base1 = TREE_OPERAND (arg1, 0);
8992 STRIP_SIGN_NOPS (base1);
8993 if (TREE_CODE (base1) == ADDR_EXPR)
8995 base1 = TREE_OPERAND (base1, 0);
8996 indirect_base1 = true;
8998 offset1 = TREE_OPERAND (arg1, 1);
8999 if (host_integerp (offset1, 0))
9001 HOST_WIDE_INT off = size_low_cst (offset1);
9002 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9003 * BITS_PER_UNIT)
9004 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9006 bitpos1 = off * BITS_PER_UNIT;
9007 offset1 = NULL_TREE;
9012 /* A local variable can never be pointed to by
9013 the default SSA name of an incoming parameter. */
9014 if ((TREE_CODE (arg0) == ADDR_EXPR
9015 && indirect_base0
9016 && TREE_CODE (base0) == VAR_DECL
9017 && auto_var_in_fn_p (base0, current_function_decl)
9018 && !indirect_base1
9019 && TREE_CODE (base1) == SSA_NAME
9020 && SSA_NAME_IS_DEFAULT_DEF (base1)
9021 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9022 || (TREE_CODE (arg1) == ADDR_EXPR
9023 && indirect_base1
9024 && TREE_CODE (base1) == VAR_DECL
9025 && auto_var_in_fn_p (base1, current_function_decl)
9026 && !indirect_base0
9027 && TREE_CODE (base0) == SSA_NAME
9028 && SSA_NAME_IS_DEFAULT_DEF (base0)
9029 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9031 if (code == NE_EXPR)
9032 return constant_boolean_node (1, type);
9033 else if (code == EQ_EXPR)
9034 return constant_boolean_node (0, type);
9036 /* If we have equivalent bases we might be able to simplify. */
9037 else if (indirect_base0 == indirect_base1
9038 && operand_equal_p (base0, base1, 0))
9040 /* We can fold this expression to a constant if the non-constant
9041 offset parts are equal. */
9042 if ((offset0 == offset1
9043 || (offset0 && offset1
9044 && operand_equal_p (offset0, offset1, 0)))
9045 && (code == EQ_EXPR
9046 || code == NE_EXPR
9047 || (indirect_base0 && DECL_P (base0))
9048 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9051 if (code != EQ_EXPR
9052 && code != NE_EXPR
9053 && bitpos0 != bitpos1
9054 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9055 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9056 fold_overflow_warning (("assuming pointer wraparound does not "
9057 "occur when comparing P +- C1 with "
9058 "P +- C2"),
9059 WARN_STRICT_OVERFLOW_CONDITIONAL);
9061 switch (code)
9063 case EQ_EXPR:
9064 return constant_boolean_node (bitpos0 == bitpos1, type);
9065 case NE_EXPR:
9066 return constant_boolean_node (bitpos0 != bitpos1, type);
9067 case LT_EXPR:
9068 return constant_boolean_node (bitpos0 < bitpos1, type);
9069 case LE_EXPR:
9070 return constant_boolean_node (bitpos0 <= bitpos1, type);
9071 case GE_EXPR:
9072 return constant_boolean_node (bitpos0 >= bitpos1, type);
9073 case GT_EXPR:
9074 return constant_boolean_node (bitpos0 > bitpos1, type);
9075 default:;
9078 /* We can simplify the comparison to a comparison of the variable
9079 offset parts if the constant offset parts are equal.
9080 Be careful to use signed sizetype here because otherwise we
9081 mess with array offsets in the wrong way. This is possible
9082 because pointer arithmetic is required to remain within an
9083 object and overflow on pointer differences is undefined as of
9084 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9085 else if (bitpos0 == bitpos1
9086 && ((code == EQ_EXPR || code == NE_EXPR)
9087 || (indirect_base0 && DECL_P (base0))
9088 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9090 /* By converting to signed sizetype we cover middle-end pointer
9091 arithmetic which operates on unsigned pointer types of size
9092 type size and ARRAY_REF offsets which are properly sign or
9093 zero extended from their type in case it is narrower than
9094 sizetype. */
9095 if (offset0 == NULL_TREE)
9096 offset0 = build_int_cst (ssizetype, 0);
9097 else
9098 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9099 if (offset1 == NULL_TREE)
9100 offset1 = build_int_cst (ssizetype, 0);
9101 else
9102 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9104 if (code != EQ_EXPR
9105 && code != NE_EXPR
9106 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9107 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9108 fold_overflow_warning (("assuming pointer wraparound does not "
9109 "occur when comparing P +- C1 with "
9110 "P +- C2"),
9111 WARN_STRICT_OVERFLOW_COMPARISON);
9113 return fold_build2_loc (loc, code, type, offset0, offset1);
9116 /* For non-equal bases we can simplify if they are addresses
9117 of local binding decls or constants. */
9118 else if (indirect_base0 && indirect_base1
9119 /* We know that !operand_equal_p (base0, base1, 0)
9120 because the if condition was false. But make
9121 sure two decls are not the same. */
9122 && base0 != base1
9123 && TREE_CODE (arg0) == ADDR_EXPR
9124 && TREE_CODE (arg1) == ADDR_EXPR
9125 && (((TREE_CODE (base0) == VAR_DECL
9126 || TREE_CODE (base0) == PARM_DECL)
9127 && (targetm.binds_local_p (base0)
9128 || CONSTANT_CLASS_P (base1)))
9129 || CONSTANT_CLASS_P (base0))
9130 && (((TREE_CODE (base1) == VAR_DECL
9131 || TREE_CODE (base1) == PARM_DECL)
9132 && (targetm.binds_local_p (base1)
9133 || CONSTANT_CLASS_P (base0)))
9134 || CONSTANT_CLASS_P (base1)))
9136 if (code == EQ_EXPR)
9137 return omit_two_operands_loc (loc, type, boolean_false_node,
9138 arg0, arg1);
9139 else if (code == NE_EXPR)
9140 return omit_two_operands_loc (loc, type, boolean_true_node,
9141 arg0, arg1);
9143 /* For equal offsets we can simplify to a comparison of the
9144 base addresses. */
9145 else if (bitpos0 == bitpos1
9146 && (indirect_base0
9147 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9148 && (indirect_base1
9149 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9150 && ((offset0 == offset1)
9151 || (offset0 && offset1
9152 && operand_equal_p (offset0, offset1, 0))))
9154 if (indirect_base0)
9155 base0 = build_fold_addr_expr_loc (loc, base0);
9156 if (indirect_base1)
9157 base1 = build_fold_addr_expr_loc (loc, base1);
9158 return fold_build2_loc (loc, code, type, base0, base1);
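   /* For example, given int a[16], the decomposition above folds
          &a[4] == &a[6]    to    0
          &a[4] <  &a[6]    to    1
      (equal bases, constant byte positions 128 and 192), and folds
      &a[i] < &a[j] to a comparison of the two variable offsets when
      only the non-constant offset parts differ.  */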
9162 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9163 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9164 the resulting offset is smaller in absolute value than the
9165 original one. */
9166 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9167 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9168 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9169 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9170 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9171 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9172 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9174 tree const1 = TREE_OPERAND (arg0, 1);
9175 tree const2 = TREE_OPERAND (arg1, 1);
9176 tree variable1 = TREE_OPERAND (arg0, 0);
9177 tree variable2 = TREE_OPERAND (arg1, 0);
9178 tree cst;
9179 const char * const warnmsg = G_("assuming signed overflow does not "
9180 "occur when combining constants around "
9181 "a comparison");
9183 /* Put the constant on the side where it doesn't overflow and is
9184 of lower absolute value than before. */
9185 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9186 ? MINUS_EXPR : PLUS_EXPR,
9187 const2, const1);
9188 if (!TREE_OVERFLOW (cst)
9189 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9191 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9192 return fold_build2_loc (loc, code, type,
9193 variable1,
9194 fold_build2_loc (loc,
9195 TREE_CODE (arg1), TREE_TYPE (arg1),
9196 variable2, cst));
9199 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9200 ? MINUS_EXPR : PLUS_EXPR,
9201 const1, const2);
9202 if (!TREE_OVERFLOW (cst)
9203 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9205 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9206 return fold_build2_loc (loc, code, type,
9207 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9208 variable1, cst),
9209 variable2);
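   /* For example, with signed x and y the code above rewrites
          x + 100 < y + 103    into    x < y + 3
      moving the combined constant to the side where its absolute
      value shrinks, so no new overflow can be introduced.  */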
9213 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9214 signed arithmetic case. That form is created by the compiler
9215 often enough for folding it to be of value. One example is in
9216 computing loop trip counts after Operator Strength Reduction. */
9217 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9218 && TREE_CODE (arg0) == MULT_EXPR
9219 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9220 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9221 && integer_zerop (arg1))
9223 tree const1 = TREE_OPERAND (arg0, 1);
9224 tree const2 = arg1; /* zero */
9225 tree variable1 = TREE_OPERAND (arg0, 0);
9226 enum tree_code cmp_code = code;
9228 /* Handle unfolded multiplication by zero. */
9229 if (integer_zerop (const1))
9230 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9232 fold_overflow_warning (("assuming signed overflow does not occur when "
9233 "eliminating multiplication in comparison "
9234 "with zero"),
9235 WARN_STRICT_OVERFLOW_COMPARISON);
9237 /* If const1 is negative we swap the sense of the comparison. */
9238 if (tree_int_cst_sgn (const1) < 0)
9239 cmp_code = swap_tree_comparison (cmp_code);
9241 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
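   /* For example, with signed x:
          x * 4  > 0    becomes    x > 0
          x * -4 > 0    becomes    x < 0   (comparison sense swapped).  */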
9244 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9245 if (tem)
9246 return tem;
9248 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9250 tree targ0 = strip_float_extensions (arg0);
9251 tree targ1 = strip_float_extensions (arg1);
9252 tree newtype = TREE_TYPE (targ0);
9254 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9255 newtype = TREE_TYPE (targ1);
9257 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9258 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9259 return fold_build2_loc (loc, code, type,
9260 fold_convert_loc (loc, newtype, targ0),
9261 fold_convert_loc (loc, newtype, targ1));
9263 /* (-a) CMP (-b) -> b CMP a */
9264 if (TREE_CODE (arg0) == NEGATE_EXPR
9265 && TREE_CODE (arg1) == NEGATE_EXPR)
9266 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9267 TREE_OPERAND (arg0, 0));
9269 if (TREE_CODE (arg1) == REAL_CST)
9271 REAL_VALUE_TYPE cst;
9272 cst = TREE_REAL_CST (arg1);
9274 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9275 if (TREE_CODE (arg0) == NEGATE_EXPR)
9276 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9277 TREE_OPERAND (arg0, 0),
9278 build_real (TREE_TYPE (arg1),
9279 real_value_negate (&cst)));
9281 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9282 /* a CMP (-0) -> a CMP 0 */
9283 if (REAL_VALUE_MINUS_ZERO (cst))
9284 return fold_build2_loc (loc, code, type, arg0,
9285 build_real (TREE_TYPE (arg1), dconst0));
9287 /* x != NaN is always true, other ops are always false. */
9288 if (REAL_VALUE_ISNAN (cst)
9289 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9291 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9292 return omit_one_operand_loc (loc, type, tem, arg0);
9295 /* Fold comparisons against infinity. */
9296 if (REAL_VALUE_ISINF (cst)
9297 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9299 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9300 if (tem != NULL_TREE)
9301 return tem;
9305 /* If this is a comparison of a real constant with a PLUS_EXPR
9306 or a MINUS_EXPR of a real constant, we can convert it into a
9307 comparison with a revised real constant as long as no overflow
9308 occurs when unsafe_math_optimizations are enabled. */
9309 if (flag_unsafe_math_optimizations
9310 && TREE_CODE (arg1) == REAL_CST
9311 && (TREE_CODE (arg0) == PLUS_EXPR
9312 || TREE_CODE (arg0) == MINUS_EXPR)
9313 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9314 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9315 ? MINUS_EXPR : PLUS_EXPR,
9316 arg1, TREE_OPERAND (arg0, 1)))
9317 && !TREE_OVERFLOW (tem))
9318 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9320 /* Likewise, we can simplify a comparison of a real constant with
9321 a MINUS_EXPR whose first operand is also a real constant, i.e.
9322 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9323 floating-point types only if -fassociative-math is set. */
9324 if (flag_associative_math
9325 && TREE_CODE (arg1) == REAL_CST
9326 && TREE_CODE (arg0) == MINUS_EXPR
9327 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9328 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9329 arg1))
9330 && !TREE_OVERFLOW (tem))
9331 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9332 TREE_OPERAND (arg0, 1), tem);
9334 /* Fold comparisons against built-in math functions. */
9335 if (TREE_CODE (arg1) == REAL_CST
9336 && flag_unsafe_math_optimizations
9337 && ! flag_errno_math)
9339 enum built_in_function fcode = builtin_mathfn_code (arg0);
9341 if (fcode != END_BUILTINS)
9343 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9344 if (tem != NULL_TREE)
9345 return tem;
9350 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9351 && CONVERT_EXPR_P (arg0))
9353 /* If we are widening one operand of an integer comparison,
9354 see if the other operand is similarly being widened. Perhaps we
9355 can do the comparison in the narrower type. */
9356 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9357 if (tem)
9358 return tem;
9360 /* Or if we are changing signedness. */
9361 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9362 if (tem)
9363 return tem;
9366 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9367 constant, we can simplify it. */
9368 if (TREE_CODE (arg1) == INTEGER_CST
9369 && (TREE_CODE (arg0) == MIN_EXPR
9370 || TREE_CODE (arg0) == MAX_EXPR)
9371 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9373 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9374 if (tem)
9375 return tem;
9378 /* Simplify comparison of something with itself. (For IEEE
9379 floating-point, we can only do some of these simplifications.) */
9380 if (operand_equal_p (arg0, arg1, 0))
9382 switch (code)
9384 case EQ_EXPR:
9385 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9386 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9387 return constant_boolean_node (1, type);
9388 break;
9390 case GE_EXPR:
9391 case LE_EXPR:
9392 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9393 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9394 return constant_boolean_node (1, type);
9395 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9397 case NE_EXPR:
9398 /* For NE, we can only do this simplification if the type is
9399 integral or we don't honor IEEE floating-point NaNs. */
9400 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9401 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9402 break;
9403 /* ... fall through ... */
9404 case GT_EXPR:
9405 case LT_EXPR:
9406 return constant_boolean_node (0, type);
9407 default:
9408 gcc_unreachable ();
9412 /* If we are comparing an expression that just has comparisons
9413 of two integer values, arithmetic expressions of those comparisons,
9414 and constants, we can simplify it. There are only three cases
9415 to check: the two values can either be equal, the first can be
9416 greater, or the second can be greater. Fold the expression for
9417 those three values. Since each value must be 0 or 1, we have
9418 eight possibilities, each of which corresponds to the constant 0
9419 or 1 or one of the six possible comparisons.
9421 This handles common cases like (a > b) == 0 but also handles
9422 expressions like ((x > y) - (y > x)) > 0, which supposedly
9423 occur in macroized code. */
9425 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9427 tree cval1 = 0, cval2 = 0;
9428 int save_p = 0;
9430 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9431 /* Don't handle degenerate cases here; they should already
9432 have been handled anyway. */
9433 && cval1 != 0 && cval2 != 0
9434 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9435 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9436 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9437 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9438 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9439 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9440 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9442 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9443 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9445 /* We can't just pass T to eval_subst in case cval1 or cval2
9446 was the same as ARG1. */
9448 tree high_result
9449 = fold_build2_loc (loc, code, type,
9450 eval_subst (loc, arg0, cval1, maxval,
9451 cval2, minval),
9452 arg1);
9453 tree equal_result
9454 = fold_build2_loc (loc, code, type,
9455 eval_subst (loc, arg0, cval1, maxval,
9456 cval2, maxval),
9457 arg1);
9458 tree low_result
9459 = fold_build2_loc (loc, code, type,
9460 eval_subst (loc, arg0, cval1, minval,
9461 cval2, maxval),
9462 arg1);
9464 /* All three of these results should be 0 or 1. Confirm they are.
9465 Then use those values to select the proper code to use. */
9467 if (TREE_CODE (high_result) == INTEGER_CST
9468 && TREE_CODE (equal_result) == INTEGER_CST
9469 && TREE_CODE (low_result) == INTEGER_CST)
9471 /* Make a 3-bit mask with the high-order bit being the
9472 value for `>', the next for '=', and the low for '<'. */
9473 switch ((integer_onep (high_result) * 4)
9474 + (integer_onep (equal_result) * 2)
9475 + integer_onep (low_result))
9477 case 0:
9478 /* Always false. */
9479 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9480 case 1:
9481 code = LT_EXPR;
9482 break;
9483 case 2:
9484 code = EQ_EXPR;
9485 break;
9486 case 3:
9487 code = LE_EXPR;
9488 break;
9489 case 4:
9490 code = GT_EXPR;
9491 break;
9492 case 5:
9493 code = NE_EXPR;
9494 break;
9495 case 6:
9496 code = GE_EXPR;
9497 break;
9498 case 7:
9499 /* Always true. */
9500 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9503 if (save_p)
9505 tem = save_expr (build2 (code, type, cval1, cval2));
9506 SET_EXPR_LOCATION (tem, loc);
9507 return tem;
9509 return fold_build2_loc (loc, code, type, cval1, cval2);
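   /* For example, for (a > b) == 0 the three substitutions evaluate
      to 0, 1 and 1, giving mask 3 and code LE_EXPR, so the whole
      expression folds to a <= b.  */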
9514 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9515 into a single range test. */
9516 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9517 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9518 && TREE_CODE (arg1) == INTEGER_CST
9519 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9520 && !integer_zerop (TREE_OPERAND (arg0, 1))
9521 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9522 && !TREE_OVERFLOW (arg1))
9524 tem = fold_div_compare (loc, code, type, arg0, arg1);
9525 if (tem != NULL_TREE)
9526 return tem;
9529 /* Fold ~X op ~Y as Y op X. */
9530 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9531 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9533 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9534 return fold_build2_loc (loc, code, type,
9535 fold_convert_loc (loc, cmp_type,
9536 TREE_OPERAND (arg1, 0)),
9537 TREE_OPERAND (arg0, 0));
9540 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9541 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9542 && TREE_CODE (arg1) == INTEGER_CST)
9544 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9545 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9546 TREE_OPERAND (arg0, 0),
9547 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9548 fold_convert_loc (loc, cmp_type, arg1)));
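   /* E.g. ~x < ~y folds to y < x, and ~x < 5 folds to x > ~5, using
      the two's complement identity ~z == -z - 1.  */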
9551 return NULL_TREE;
9555 /* Subroutine of fold_binary. Optimize complex multiplications of the
9556 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9557 argument EXPR represents the expression "z" of type TYPE. */
9559 static tree
9560 fold_mult_zconjz (location_t loc, tree type, tree expr)
9562 tree itype = TREE_TYPE (type);
9563 tree rpart, ipart, tem;
9565 if (TREE_CODE (expr) == COMPLEX_EXPR)
9567 rpart = TREE_OPERAND (expr, 0);
9568 ipart = TREE_OPERAND (expr, 1);
9570 else if (TREE_CODE (expr) == COMPLEX_CST)
9572 rpart = TREE_REALPART (expr);
9573 ipart = TREE_IMAGPART (expr);
9575 else
9577 expr = save_expr (expr);
9578 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9579 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9582 rpart = save_expr (rpart);
9583 ipart = save_expr (ipart);
9584 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9585 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9586 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9587 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9588 build_zero_cst (itype));
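   /* For z = a + b*i this builds (a*a + b*b) + 0*i, since
      z * conj(z) == (a + b*i) * (a - b*i) == a*a + b*b.  */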
9592 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9593 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9594 guarantees that P and N have the same least significant log2(M) bits.
9595 N is not otherwise constrained. In particular, N is not normalized to
9596 0 <= N < M as is common. In general, the precise value of P is unknown.
9597 M is chosen as large as possible such that constant N can be determined.
9599 Returns M and sets *RESIDUE to N.
9601 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9602 account. This is not always possible due to PR 35705.
9605 static unsigned HOST_WIDE_INT
9606 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9607 bool allow_func_align)
9609 enum tree_code code;
9611 *residue = 0;
9613 code = TREE_CODE (expr);
9614 if (code == ADDR_EXPR)
9616 unsigned int bitalign;
9617 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9618 *residue /= BITS_PER_UNIT;
9619 return bitalign / BITS_PER_UNIT;
9621 else if (code == POINTER_PLUS_EXPR)
9623 tree op0, op1;
9624 unsigned HOST_WIDE_INT modulus;
9625 enum tree_code inner_code;
9627 op0 = TREE_OPERAND (expr, 0);
9628 STRIP_NOPS (op0);
9629 modulus = get_pointer_modulus_and_residue (op0, residue,
9630 allow_func_align);
9632 op1 = TREE_OPERAND (expr, 1);
9633 STRIP_NOPS (op1);
9634 inner_code = TREE_CODE (op1);
9635 if (inner_code == INTEGER_CST)
9637 *residue += TREE_INT_CST_LOW (op1);
9638 return modulus;
9640 else if (inner_code == MULT_EXPR)
9642 op1 = TREE_OPERAND (op1, 1);
9643 if (TREE_CODE (op1) == INTEGER_CST)
9645 unsigned HOST_WIDE_INT align;
9647 /* Compute the greatest power-of-2 divisor of op1. */
9648 align = TREE_INT_CST_LOW (op1);
9649 align &= -align;
9651 /* If align is non-zero and less than modulus, replace
9652 modulus with align.  If align is 0, then either op1 is 0
9653 or the greatest power-of-2 divisor of op1 doesn't fit in an
9654 unsigned HOST_WIDE_INT. In either case, no additional
9655 constraint is imposed. */
9656 if (align)
9657 modulus = MIN (modulus, align);
9659 return modulus;
9664 /* If we get here, we were unable to determine anything useful about the
9665 expression. */
9666 return 1;
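   /* For example, if x is an 8-byte-aligned object, then for the
      expression &x + 5 this returns modulus 8 with *residue == 5:
      the pointer value P satisfies P == 5 (mod 8).  */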
9669 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9670 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9672 static bool
9673 vec_cst_ctor_to_array (tree arg, tree *elts)
9675 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9677 if (TREE_CODE (arg) == VECTOR_CST)
9679 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9680 elts[i] = VECTOR_CST_ELT (arg, i);
9682 else if (TREE_CODE (arg) == CONSTRUCTOR)
9684 constructor_elt *elt;
9686 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9687 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9688 return false;
9689 else
9690 elts[i] = elt->value;
9692 else
9693 return false;
9694 for (; i < nelts; i++)
9695 elts[i]
9696 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9697 return true;
9700 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9701 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9702 NULL_TREE otherwise. */
9704 static tree
9705 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9707 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9708 tree *elts;
9709 bool need_ctor = false;
9711 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9712 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9713 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9714 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9715 return NULL_TREE;
9717 elts = XALLOCAVEC (tree, nelts * 3);
9718 if (!vec_cst_ctor_to_array (arg0, elts)
9719 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9720 return NULL_TREE;
9722 for (i = 0; i < nelts; i++)
9724 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9725 need_ctor = true;
9726 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9729 if (need_ctor)
9731 vec<constructor_elt, va_gc> *v;
9732 vec_alloc (v, nelts);
9733 for (i = 0; i < nelts; i++)
9734 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9735 return build_constructor (type, v);
9737 else
9738 return build_vector (type, &elts[2 * nelts]);
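   /* For example, permuting two V4SI constants {0,1,2,3} and {4,5,6,7}
      with SEL = {0,5,2,7} indexes into the concatenation of the two
      inputs and yields {0,5,2,7}.  */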
9741 /* Try to fold a pointer difference of type TYPE two address expressions of
9742 array references AREF0 and AREF1 using location LOC. Return a
9743 simplified expression for the difference or NULL_TREE. */
9745 static tree
9746 fold_addr_of_array_ref_difference (location_t loc, tree type,
9747 tree aref0, tree aref1)
9749 tree base0 = TREE_OPERAND (aref0, 0);
9750 tree base1 = TREE_OPERAND (aref1, 0);
9751 tree base_offset = build_int_cst (type, 0);
9753 /* If the bases are array references as well, recurse. If the bases
9754 are pointer indirections, compute the difference of the pointers.
9755 If the bases are equal, we are set. */
9756 if ((TREE_CODE (base0) == ARRAY_REF
9757 && TREE_CODE (base1) == ARRAY_REF
9758 && (base_offset
9759 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9760 || (INDIRECT_REF_P (base0)
9761 && INDIRECT_REF_P (base1)
9762 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9763 TREE_OPERAND (base0, 0),
9764 TREE_OPERAND (base1, 0))))
9765 || operand_equal_p (base0, base1, 0))
9767 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9768 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9769 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9770 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9771 return fold_build2_loc (loc, PLUS_EXPR, type,
9772 base_offset,
9773 fold_build2_loc (loc, MULT_EXPR, type,
9774 diff, esz));
9776 return NULL_TREE;
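   /* For example, for int a[], the byte difference of &a[i] and &a[j]
      folds to 0 + (i - j) * 4, the index difference scaled by the
      element size.  */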
9779 /* If the real or vector real constant CST of type TYPE has an exact
9780 inverse, return it, else return NULL. */
9782 static tree
9783 exact_inverse (tree type, tree cst)
9785 REAL_VALUE_TYPE r;
9786 tree unit_type, *elts;
9787 enum machine_mode mode;
9788 unsigned vec_nelts, i;
9790 switch (TREE_CODE (cst))
9792 case REAL_CST:
9793 r = TREE_REAL_CST (cst);
9795 if (exact_real_inverse (TYPE_MODE (type), &r))
9796 return build_real (type, r);
9798 return NULL_TREE;
9800 case VECTOR_CST:
9801 vec_nelts = VECTOR_CST_NELTS (cst);
9802 elts = XALLOCAVEC (tree, vec_nelts);
9803 unit_type = TREE_TYPE (type);
9804 mode = TYPE_MODE (unit_type);
9806 for (i = 0; i < vec_nelts; i++)
9808 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9809 if (!exact_real_inverse (mode, &r))
9810 return NULL_TREE;
9811 elts[i] = build_real (unit_type, r);
9814 return build_vector (type, elts);
9816 default:
9817 return NULL_TREE;
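   /* E.g. for the constant 0.25 this yields 4.0 (the inverse is exactly
      representable), while for 3.0 it returns NULL_TREE because 1/3 has
      no exact binary representation.  */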
9821 /* Fold a binary expression of code CODE and type TYPE with operands
9822 OP0 and OP1. LOC is the location of the resulting expression.
9823 Return the folded expression if folding is successful. Otherwise,
9824 return NULL_TREE. */
9826 tree
9827 fold_binary_loc (location_t loc,
9828 enum tree_code code, tree type, tree op0, tree op1)
9830 enum tree_code_class kind = TREE_CODE_CLASS (code);
9831 tree arg0, arg1, tem;
9832 tree t1 = NULL_TREE;
9833 bool strict_overflow_p;
9835 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9836 && TREE_CODE_LENGTH (code) == 2
9837 && op0 != NULL_TREE
9838 && op1 != NULL_TREE);
9840 arg0 = op0;
9841 arg1 = op1;
9843 /* Strip any conversions that don't change the mode. This is
9844 safe for every expression, except for a comparison expression
9845 because its signedness is derived from its operands. So, in
9846 the latter case, only strip conversions that don't change the
9847 signedness.  MIN_EXPR/MAX_EXPR also need the signedness of their
9848 arguments preserved.
9850 Note that this is done as an internal manipulation within the
9851 constant folder, in order to find the simplest representation
9852 of the arguments so that their form can be studied. In any
9853 case, the appropriate type conversions should be put back in
9854 the tree that will get out of the constant folder. */
9856 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9858 STRIP_SIGN_NOPS (arg0);
9859 STRIP_SIGN_NOPS (arg1);
9861 else
9863 STRIP_NOPS (arg0);
9864 STRIP_NOPS (arg1);
9867 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9868 constant but we can't do arithmetic on them. */
9869 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9870 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9871 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9872 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9873 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9874 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9876 if (kind == tcc_binary)
9878 /* Make sure type and arg0 have the same saturating flag. */
9879 gcc_assert (TYPE_SATURATING (type)
9880 == TYPE_SATURATING (TREE_TYPE (arg0)));
9881 tem = const_binop (code, arg0, arg1);
9883 else if (kind == tcc_comparison)
9884 tem = fold_relational_const (code, type, arg0, arg1);
9885 else
9886 tem = NULL_TREE;
9888 if (tem != NULL_TREE)
9890 if (TREE_TYPE (tem) != type)
9891 tem = fold_convert_loc (loc, type, tem);
9892 return tem;
9896 /* If this is a commutative operation, and ARG0 is a constant, move it
9897 to ARG1 to reduce the number of tests below. */
9898 if (commutative_tree_code (code)
9899 && tree_swap_operands_p (arg0, arg1, true))
9900 return fold_build2_loc (loc, code, type, op1, op0);
9902 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9904 First check for cases where an arithmetic operation is applied to a
9905 compound, conditional, or comparison operation. Push the arithmetic
9906 operation inside the compound or conditional to see if any folding
9907 can then be done. Convert comparison to conditional for this purpose.
9908 This also optimizes non-constant cases that used to be done in
9909 expand_expr.
9911 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9912 where one of the operands is a truth value and the other is a truth
9913 value or a BIT_AND_EXPR with the constant 1. In that case, the
9914 code below would make the expression more complex. Change it to a
9915 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9916 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9918 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9919 || code == EQ_EXPR || code == NE_EXPR)
9920 && TREE_CODE (type) != VECTOR_TYPE
9921 && ((truth_value_p (TREE_CODE (arg0))
9922 && (truth_value_p (TREE_CODE (arg1))
9923 || (TREE_CODE (arg1) == BIT_AND_EXPR
9924 && integer_onep (TREE_OPERAND (arg1, 1)))))
9925 || (truth_value_p (TREE_CODE (arg1))
9926 && (truth_value_p (TREE_CODE (arg0))
9927 || (TREE_CODE (arg0) == BIT_AND_EXPR
9928 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9930 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9931 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9932 : TRUTH_XOR_EXPR,
9933 boolean_type_node,
9934 fold_convert_loc (loc, boolean_type_node, arg0),
9935 fold_convert_loc (loc, boolean_type_node, arg1));
9937 if (code == EQ_EXPR)
9938 tem = invert_truthvalue_loc (loc, tem);
9940 return fold_convert_loc (loc, type, tem);
9943 if (TREE_CODE_CLASS (code) == tcc_binary
9944 || TREE_CODE_CLASS (code) == tcc_comparison)
9946 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9948 tem = fold_build2_loc (loc, code, type,
9949 fold_convert_loc (loc, TREE_TYPE (op0),
9950 TREE_OPERAND (arg0, 1)), op1);
9951 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9952 tem);
9954 if (TREE_CODE (arg1) == COMPOUND_EXPR
9955 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9957 tem = fold_build2_loc (loc, code, type, op0,
9958 fold_convert_loc (loc, TREE_TYPE (op1),
9959 TREE_OPERAND (arg1, 1)));
9960 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9961 tem);
9964 if (TREE_CODE (arg0) == COND_EXPR
9965 || TREE_CODE (arg0) == VEC_COND_EXPR
9966 || COMPARISON_CLASS_P (arg0))
9968 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9969 arg0, arg1,
9970 /*cond_first_p=*/1);
9971 if (tem != NULL_TREE)
9972 return tem;
9975 if (TREE_CODE (arg1) == COND_EXPR
9976 || TREE_CODE (arg1) == VEC_COND_EXPR
9977 || COMPARISON_CLASS_P (arg1))
9979 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9980 arg1, arg0,
9981 /*cond_first_p=*/0);
9982 if (tem != NULL_TREE)
9983 return tem;
9987 switch (code)
9989 case MEM_REF:
9990 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9991 if (TREE_CODE (arg0) == ADDR_EXPR
9992 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9994 tree iref = TREE_OPERAND (arg0, 0);
9995 return fold_build2 (MEM_REF, type,
9996 TREE_OPERAND (iref, 0),
9997 int_const_binop (PLUS_EXPR, arg1,
9998 TREE_OPERAND (iref, 1)));
10001 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10002 if (TREE_CODE (arg0) == ADDR_EXPR
10003 && handled_component_p (TREE_OPERAND (arg0, 0)))
10005 tree base;
10006 HOST_WIDE_INT coffset;
10007 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10008 &coffset);
10009 if (!base)
10010 return NULL_TREE;
10011 return fold_build2 (MEM_REF, type,
10012 build_fold_addr_expr (base),
10013 int_const_binop (PLUS_EXPR, arg1,
10014 size_int (coffset)));
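   /* For example, MEM[&MEM[p, 4], 8] folds to MEM[p, 12], and for
      struct s { int a, b; } x, MEM[&x.b, 4] folds to
      MEM[&x, offsetof (struct s, b) + 4], i.e. MEM[&x, 8].  */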
10017 return NULL_TREE;
10019 case POINTER_PLUS_EXPR:
10020 /* 0 +p index -> (type)index */
10021 if (integer_zerop (arg0))
10022 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10024 /* PTR +p 0 -> PTR */
10025 if (integer_zerop (arg1))
10026 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10028 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10029 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10030 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10031 return fold_convert_loc (loc, type,
10032 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10033 fold_convert_loc (loc, sizetype,
10034 arg1),
10035 fold_convert_loc (loc, sizetype,
10036 arg0)));
10038 /* (PTR +p B) +p A -> PTR +p (B + A) */
10039 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10040 && !upc_shared_type_p (TREE_TYPE (type)))
10042 tree inner;
10043 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10044 tree arg00 = TREE_OPERAND (arg0, 0);
10045 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10046 arg01, fold_convert_loc (loc, sizetype, arg1));
10047 return fold_convert_loc (loc, type,
10048 fold_build_pointer_plus_loc (loc,
10049 arg00, inner));
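   /* E.g. (p p+ 4) p+ 8 becomes p p+ (4 + 8), folded to p p+ 12.  */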
10052 /* PTR_CST +p CST -> CST1 */
10053 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10054 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10055 fold_convert_loc (loc, type, arg1));
10057 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10058 of the array.  The loop optimizer sometimes produces this type of
10059 expression. */
10060 if (TREE_CODE (arg0) == ADDR_EXPR)
10062 tem = try_move_mult_to_index (loc, arg0,
10063 fold_convert_loc (loc,
10064 ssizetype, arg1));
10065 if (tem)
10066 return fold_convert_loc (loc, type, tem);
10069 return NULL_TREE;
10071 case PLUS_EXPR:
10072 /* A + (-B) -> A - B */
10073 if (TREE_CODE (arg1) == NEGATE_EXPR)
10074 return fold_build2_loc (loc, MINUS_EXPR, type,
10075 fold_convert_loc (loc, type, arg0),
10076 fold_convert_loc (loc, type,
10077 TREE_OPERAND (arg1, 0)));
10078 /* (-A) + B -> B - A */
10079 if (TREE_CODE (arg0) == NEGATE_EXPR
10080 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10081 return fold_build2_loc (loc, MINUS_EXPR, type,
10082 fold_convert_loc (loc, type, arg1),
10083 fold_convert_loc (loc, type,
10084 TREE_OPERAND (arg0, 0)));
10086 /* Disable further optimizations involving UPC shared pointers,
10087 because integers are not interoperable with shared pointers. */
10088 if ((TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
10089 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
10090 || (TREE_TYPE (arg1) && POINTER_TYPE_P (TREE_TYPE (arg1))
10091 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg1)))))
10092 return NULL_TREE;
10094 if (INTEGRAL_TYPE_P (type))
10096 /* Convert ~A + 1 to -A. */
10097 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10098 && integer_onep (arg1))
10099 return fold_build1_loc (loc, NEGATE_EXPR, type,
10100 fold_convert_loc (loc, type,
10101 TREE_OPERAND (arg0, 0)));
10103 /* ~X + X is -1. */
10104 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10105 && !TYPE_OVERFLOW_TRAPS (type))
10107 tree tem = TREE_OPERAND (arg0, 0);
10109 STRIP_NOPS (tem);
10110 if (operand_equal_p (tem, arg1, 0))
10112 t1 = build_int_cst_type (type, -1);
10113 return omit_one_operand_loc (loc, type, t1, arg1);
10117 /* X + ~X is -1. */
10118 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10119 && !TYPE_OVERFLOW_TRAPS (type))
10121 tree tem = TREE_OPERAND (arg1, 0);
10123 STRIP_NOPS (tem);
10124 if (operand_equal_p (arg0, tem, 0))
10126 t1 = build_int_cst_type (type, -1);
10127 return omit_one_operand_loc (loc, type, t1, arg0);
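   /* Both cases rely on the two's complement identity ~x == -x - 1,
      so e.g. ~5 + 5 == -6 + 5 == -1 for any integral x.  */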
10131 /* X + (X / CST) * -CST is X % CST. */
10132 if (TREE_CODE (arg1) == MULT_EXPR
10133 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10134 && operand_equal_p (arg0,
10135 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10137 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10138 tree cst1 = TREE_OPERAND (arg1, 1);
10139 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10140 cst1, cst0);
10141 if (sum && integer_zerop (sum))
10142 return fold_convert_loc (loc, type,
10143 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10144 TREE_TYPE (arg0), arg0,
10145 cst0));
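   /* E.g. x + (x / 8) * -8 folds to x % 8, matching the C definition
      x % 8 == x - (x / 8) * 8 for truncating division.  */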
10149 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10150 one. Make sure the type is not saturating and has the signedness of
10151 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10152 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10153 if ((TREE_CODE (arg0) == MULT_EXPR
10154 || TREE_CODE (arg1) == MULT_EXPR)
10155 && !TYPE_SATURATING (type)
10156 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10157 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10158 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10160 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10161 if (tem)
10162 return tem;
10165 if (! FLOAT_TYPE_P (type))
10167 if (integer_zerop (arg1))
10168 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10170 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10171 with a constant, and the two constants have no bits in common,
10172 we should treat this as a BIT_IOR_EXPR since this may produce more
10173 simplifications. */
10174 if (TREE_CODE (arg0) == BIT_AND_EXPR
10175 && TREE_CODE (arg1) == BIT_AND_EXPR
10176 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10177 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10178 && integer_zerop (const_binop (BIT_AND_EXPR,
10179 TREE_OPERAND (arg0, 1),
10180 TREE_OPERAND (arg1, 1))))
10182 code = BIT_IOR_EXPR;
10183 goto bit_ior;
10186 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10187 (plus (plus (mult) (mult)) (foo)) so that we can
10188 take advantage of the factoring cases below. */
10189 if (TYPE_OVERFLOW_WRAPS (type)
10190 && (((TREE_CODE (arg0) == PLUS_EXPR
10191 || TREE_CODE (arg0) == MINUS_EXPR)
10192 && TREE_CODE (arg1) == MULT_EXPR)
10193 || ((TREE_CODE (arg1) == PLUS_EXPR
10194 || TREE_CODE (arg1) == MINUS_EXPR)
10195 && TREE_CODE (arg0) == MULT_EXPR)))
10197 tree parg0, parg1, parg, marg;
10198 enum tree_code pcode;
10200 if (TREE_CODE (arg1) == MULT_EXPR)
10201 parg = arg0, marg = arg1;
10202 else
10203 parg = arg1, marg = arg0;
10204 pcode = TREE_CODE (parg);
10205 parg0 = TREE_OPERAND (parg, 0);
10206 parg1 = TREE_OPERAND (parg, 1);
10207 STRIP_NOPS (parg0);
10208 STRIP_NOPS (parg1);
10210 if (TREE_CODE (parg0) == MULT_EXPR
10211 && TREE_CODE (parg1) != MULT_EXPR)
10212 return fold_build2_loc (loc, pcode, type,
10213 fold_build2_loc (loc, PLUS_EXPR, type,
10214 fold_convert_loc (loc, type,
10215 parg0),
10216 fold_convert_loc (loc, type,
10217 marg)),
10218 fold_convert_loc (loc, type, parg1));
10219 if (TREE_CODE (parg0) != MULT_EXPR
10220 && TREE_CODE (parg1) == MULT_EXPR)
10221 return
10222 fold_build2_loc (loc, PLUS_EXPR, type,
10223 fold_convert_loc (loc, type, parg0),
10224 fold_build2_loc (loc, pcode, type,
10225 fold_convert_loc (loc, type, marg),
10226 fold_convert_loc (loc, type,
10227 parg1)));
10230 else
10232 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10233 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10234 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10236 /* Likewise if the operands are reversed. */
10237 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10238 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10240 /* Convert X + -C into X - C. */
10241 if (TREE_CODE (arg1) == REAL_CST
10242 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10244 tem = fold_negate_const (arg1, type);
10245 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10246 return fold_build2_loc (loc, MINUS_EXPR, type,
10247 fold_convert_loc (loc, type, arg0),
10248 fold_convert_loc (loc, type, tem));
10251 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10252 to __complex__ ( x, y ). This is not the same for SNaNs or
10253 if signed zeros are involved. */
10254 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10255 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10256 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10258 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10259 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10260 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10261 bool arg0rz = false, arg0iz = false;
10262 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10263 || (arg0i && (arg0iz = real_zerop (arg0i))))
10265 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10266 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10267 if (arg0rz && arg1i && real_zerop (arg1i))
10269 tree rp = arg1r ? arg1r
10270 : build1 (REALPART_EXPR, rtype, arg1);
10271 tree ip = arg0i ? arg0i
10272 : build1 (IMAGPART_EXPR, rtype, arg0);
10273 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10275 else if (arg0iz && arg1r && real_zerop (arg1r))
10277 tree rp = arg0r ? arg0r
10278 : build1 (REALPART_EXPR, rtype, arg0);
10279 tree ip = arg1i ? arg1i
10280 : build1 (IMAGPART_EXPR, rtype, arg1);
10281 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10286 if (flag_unsafe_math_optimizations
10287 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10288 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10289 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10290 return tem;
10292 /* Convert x+x into x*2.0. */
10293 if (operand_equal_p (arg0, arg1, 0)
10294 && SCALAR_FLOAT_TYPE_P (type))
10295 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10296 build_real (type, dconst2));
10298 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10299 We associate floats only if the user has specified
10300 -fassociative-math. */
10301 if (flag_associative_math
10302 && TREE_CODE (arg1) == PLUS_EXPR
10303 && TREE_CODE (arg0) != MULT_EXPR)
10305 tree tree10 = TREE_OPERAND (arg1, 0);
10306 tree tree11 = TREE_OPERAND (arg1, 1);
10307 if (TREE_CODE (tree11) == MULT_EXPR
10308 && TREE_CODE (tree10) == MULT_EXPR)
10310 tree tree0;
10311 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10312 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10315 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10316 We associate floats only if the user has specified
10317 -fassociative-math. */
10318 if (flag_associative_math
10319 && TREE_CODE (arg0) == PLUS_EXPR
10320 && TREE_CODE (arg1) != MULT_EXPR)
10322 tree tree00 = TREE_OPERAND (arg0, 0);
10323 tree tree01 = TREE_OPERAND (arg0, 1);
10324 if (TREE_CODE (tree01) == MULT_EXPR
10325 && TREE_CODE (tree00) == MULT_EXPR)
10327 tree tree0;
10328 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10329 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10334 bit_rotate:
10335 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10336 is a rotate of A by C1 bits. */
10337 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10338 is a rotate of A by B bits. */
10340 enum tree_code code0, code1;
10341 tree rtype;
10342 code0 = TREE_CODE (arg0);
10343 code1 = TREE_CODE (arg1);
10344 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10345 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10346 && operand_equal_p (TREE_OPERAND (arg0, 0),
10347 TREE_OPERAND (arg1, 0), 0)
10348 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10349 TYPE_UNSIGNED (rtype))
10350 /* Only create rotates in complete modes. Other cases are not
10351 expanded properly. */
10352 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10354 tree tree01, tree11;
10355 enum tree_code code01, code11;
10357 tree01 = TREE_OPERAND (arg0, 1);
10358 tree11 = TREE_OPERAND (arg1, 1);
10359 STRIP_NOPS (tree01);
10360 STRIP_NOPS (tree11);
10361 code01 = TREE_CODE (tree01);
10362 code11 = TREE_CODE (tree11);
10363 if (code01 == INTEGER_CST
10364 && code11 == INTEGER_CST
10365 && TREE_INT_CST_HIGH (tree01) == 0
10366 && TREE_INT_CST_HIGH (tree11) == 0
10367 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10368 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10370 tem = build2_loc (loc, LROTATE_EXPR,
10371 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10372 TREE_OPERAND (arg0, 0),
10373 code0 == LSHIFT_EXPR ? tree01 : tree11);
10374 return fold_convert_loc (loc, type, tem);
10376 else if (code11 == MINUS_EXPR)
10378 tree tree110, tree111;
10379 tree110 = TREE_OPERAND (tree11, 0);
10380 tree111 = TREE_OPERAND (tree11, 1);
10381 STRIP_NOPS (tree110);
10382 STRIP_NOPS (tree111);
10383 if (TREE_CODE (tree110) == INTEGER_CST
10384 && 0 == compare_tree_int (tree110,
10385 TYPE_PRECISION
10386 (TREE_TYPE (TREE_OPERAND
10387 (arg0, 0))))
10388 && operand_equal_p (tree01, tree111, 0))
10389 return
10390 fold_convert_loc (loc, type,
10391 build2 ((code0 == LSHIFT_EXPR
10392 ? LROTATE_EXPR
10393 : RROTATE_EXPR),
10394 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10395 TREE_OPERAND (arg0, 0), tree01));
10397 else if (code01 == MINUS_EXPR)
10399 tree tree010, tree011;
10400 tree010 = TREE_OPERAND (tree01, 0);
10401 tree011 = TREE_OPERAND (tree01, 1);
10402 STRIP_NOPS (tree010);
10403 STRIP_NOPS (tree011);
10404 if (TREE_CODE (tree010) == INTEGER_CST
10405 && 0 == compare_tree_int (tree010,
10406 TYPE_PRECISION
10407 (TREE_TYPE (TREE_OPERAND
10408 (arg0, 0))))
10409 && operand_equal_p (tree11, tree011, 0))
10410 return fold_convert_loc
10411 (loc, type,
10412 build2 ((code0 != LSHIFT_EXPR
10413 ? LROTATE_EXPR
10414 : RROTATE_EXPR),
10415 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10416 TREE_OPERAND (arg0, 0), tree11));
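   /* For a 32-bit unsigned x these patterns turn
          (x << 3) + (x >> 29)          and
          (x << n) + (x >> (32 - n))
      into a left rotate of x by 3 and by n, respectively.  */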
10421 associate:
10422 /* In most languages, we cannot associate operations on floats through
10423 parentheses. Rather than remember where the parentheses were, we
10424 don't associate floats at all, unless the user has specified
10425 -fassociative-math.
10426 And, we need to make sure type is not saturating. */
10428 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10429 && !TYPE_SATURATING (type))
10431 tree var0, con0, lit0, minus_lit0;
10432 tree var1, con1, lit1, minus_lit1;
10433 tree atype = type;
10434 bool ok = true;
10436 /* Split both trees into variables, constants, and literals. Then
10437 associate each group together, the constants with literals,
10438 then the result with variables. This increases the chances of
10439 literals being recombined later and of generating relocatable
10440 expressions for the sum of a constant and literal. */
10441 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10442 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10443 code == MINUS_EXPR);
10445 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10446 if (code == MINUS_EXPR)
10447 code = PLUS_EXPR;
10449 /* With undefined overflow prefer doing association in a type
10450 which wraps on overflow, if that is one of the operand types. */
10451 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10452 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10454 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10455 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10456 atype = TREE_TYPE (arg0);
10457 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10458 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10459 atype = TREE_TYPE (arg1);
10460 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10463 /* With undefined overflow we can only associate constants with one
10464 variable, and constants whose association doesn't overflow. */
10465 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10466 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10468 if (var0 && var1)
10470 tree tmp0 = var0;
10471 tree tmp1 = var1;
10473 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10474 tmp0 = TREE_OPERAND (tmp0, 0);
10475 if (CONVERT_EXPR_P (tmp0)
10476 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10477 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10478 <= TYPE_PRECISION (atype)))
10479 tmp0 = TREE_OPERAND (tmp0, 0);
10480 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10481 tmp1 = TREE_OPERAND (tmp1, 0);
10482 if (CONVERT_EXPR_P (tmp1)
10483 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10484 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10485 <= TYPE_PRECISION (atype)))
10486 tmp1 = TREE_OPERAND (tmp1, 0);
10487 /* The only case we can still associate with two variables
10488 is if they are the same, modulo negation and bit-pattern
10489 preserving conversions. */
10490 if (!operand_equal_p (tmp0, tmp1, 0))
10491 ok = false;
10495 /* Only do something if we found more than two objects. Otherwise,
10496 nothing has changed and we risk infinite recursion. */
10497 if (ok
10498 && (2 < ((var0 != 0) + (var1 != 0)
10499 + (con0 != 0) + (con1 != 0)
10500 + (lit0 != 0) + (lit1 != 0)
10501 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10503 bool any_overflows = false;
10504 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10505 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10506 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10507 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10508 var0 = associate_trees (loc, var0, var1, code, atype);
10509 con0 = associate_trees (loc, con0, con1, code, atype);
10510 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10511 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10512 code, atype);
10514 /* Preserve the MINUS_EXPR if the negative part of the literal is
10515 greater than the positive part. Otherwise, the multiplicative
10516 folding code (i.e. extract_muldiv) may be fooled in case
10517 unsigned constants are subtracted, like in the following
10518 example: ((X*2 + 4) - 8U)/2. */
10519 if (minus_lit0 && lit0)
10521 if (TREE_CODE (lit0) == INTEGER_CST
10522 && TREE_CODE (minus_lit0) == INTEGER_CST
10523 && tree_int_cst_lt (lit0, minus_lit0))
10525 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10526 MINUS_EXPR, atype);
10527 lit0 = 0;
10529 else
10531 lit0 = associate_trees (loc, lit0, minus_lit0,
10532 MINUS_EXPR, atype);
10533 minus_lit0 = 0;
10537 /* Don't introduce overflows through reassociation. */
10538 if (!any_overflows
10539 && ((lit0 && TREE_OVERFLOW (lit0))
10540 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10541 return NULL_TREE;
10543 if (minus_lit0)
10545 if (con0 == 0)
10546 return
10547 fold_convert_loc (loc, type,
10548 associate_trees (loc, var0, minus_lit0,
10549 MINUS_EXPR, atype));
10550 else
10552 con0 = associate_trees (loc, con0, minus_lit0,
10553 MINUS_EXPR, atype);
10554 return
10555 fold_convert_loc (loc, type,
10556 associate_trees (loc, var0, con0,
10557 PLUS_EXPR, atype));
10561 con0 = associate_trees (loc, con0, lit0, code, atype);
10562 return
10563 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10564 code, atype));
10568 return NULL_TREE;
10570 case MINUS_EXPR:
10571 /* Pointer simplifications for subtraction, simple reassociations. */
10572 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10574 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10575 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10576 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10578 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10579 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10580 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10581 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10582 return fold_build2_loc (loc, PLUS_EXPR, type,
10583 fold_build2_loc (loc, MINUS_EXPR, type,
10584 arg00, arg10),
10585 fold_build2_loc (loc, MINUS_EXPR, type,
10586 arg01, arg11));
10588 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10589 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10591 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10592 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10593 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10594 fold_convert_loc (loc, type, arg1));
10595 if (tmp)
10596 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10599 /* A - (-B) -> A + B */
10600 if (TREE_CODE (arg1) == NEGATE_EXPR)
10601 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10602 fold_convert_loc (loc, type,
10603 TREE_OPERAND (arg1, 0)));
10605 /* Disable further optimizations involving UPC shared pointers,
10606 because integers are not interoperable with shared pointers.
10607 (The test below also detects pointer difference between
10608 shared pointers, which cannot be folded.) */
10610 if (TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
10611 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
10612 return NULL_TREE;
10614 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10615 if (TREE_CODE (arg0) == NEGATE_EXPR
10616 && (FLOAT_TYPE_P (type)
10617 || INTEGRAL_TYPE_P (type))
10618 && negate_expr_p (arg1)
10619 && reorder_operands_p (arg0, arg1))
10620 return fold_build2_loc (loc, MINUS_EXPR, type,
10621 fold_convert_loc (loc, type,
10622 negate_expr (arg1)),
10623 fold_convert_loc (loc, type,
10624 TREE_OPERAND (arg0, 0)));
10625 /* Convert -A - 1 to ~A. */
10626 if (INTEGRAL_TYPE_P (type)
10627 && TREE_CODE (arg0) == NEGATE_EXPR
10628 && integer_onep (arg1)
10629 && !TYPE_OVERFLOW_TRAPS (type))
10630 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10631 fold_convert_loc (loc, type,
10632 TREE_OPERAND (arg0, 0)));
10634 /* Convert -1 - A to ~A. */
10635 if (INTEGRAL_TYPE_P (type)
10636 && integer_all_onesp (arg0))
10637 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10640 /* X - (X / CST) * CST is X % CST. */
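 /* For example, with X == 29 and CST == 8:
 29 - (29 / 8) * 8 == 29 - 24 == 5 == 29 % 8. */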
10641 if (INTEGRAL_TYPE_P (type)
10642 && TREE_CODE (arg1) == MULT_EXPR
10643 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10644 && operand_equal_p (arg0,
10645 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10646 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10647 TREE_OPERAND (arg1, 1), 0))
10648 return
10649 fold_convert_loc (loc, type,
10650 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10651 arg0, TREE_OPERAND (arg1, 1)));
10653 if (! FLOAT_TYPE_P (type))
10655 if (integer_zerop (arg0))
10656 return negate_expr (fold_convert_loc (loc, type, arg1));
10657 if (integer_zerop (arg1))
10658 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10660 /* Fold A - (A & B) into ~B & A. */
10661 if (!TREE_SIDE_EFFECTS (arg0)
10662 && TREE_CODE (arg1) == BIT_AND_EXPR)
10664 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10666 tree arg10 = fold_convert_loc (loc, type,
10667 TREE_OPERAND (arg1, 0));
10668 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10669 fold_build1_loc (loc, BIT_NOT_EXPR,
10670 type, arg10),
10671 fold_convert_loc (loc, type, arg0));
10673 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10675 tree arg11 = fold_convert_loc (loc,
10676 type, TREE_OPERAND (arg1, 1));
10677 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10678 fold_build1_loc (loc, BIT_NOT_EXPR,
10679 type, arg11),
10680 fold_convert_loc (loc, type, arg0));
10684 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10685 any power of 2 minus 1. */
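 /* E.g. with A == 5 and B == 3: (5 & ~3) - (5 & 3) == 4 - 1 == 3,
 and (5 ^ 3) - 3 == 6 - 3 == 3. */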
10686 if (TREE_CODE (arg0) == BIT_AND_EXPR
10687 && TREE_CODE (arg1) == BIT_AND_EXPR
10688 && operand_equal_p (TREE_OPERAND (arg0, 0),
10689 TREE_OPERAND (arg1, 0), 0))
10691 tree mask0 = TREE_OPERAND (arg0, 1);
10692 tree mask1 = TREE_OPERAND (arg1, 1);
10693 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10695 if (operand_equal_p (tem, mask1, 0))
10697 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10698 TREE_OPERAND (arg0, 0), mask1);
10699 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10704 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10705 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10706 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10708 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10709 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10710 (-ARG1 + ARG0) reduces to -ARG1. */
10711 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10712 return negate_expr (fold_convert_loc (loc, type, arg1));
10714 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10715 __complex__ ( x, -y ). This is not the same for SNaNs or if
10716 signed zeros are involved. */
10717 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10718 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10719 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10721 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10722 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10723 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10724 bool arg0rz = false, arg0iz = false;
10725 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10726 || (arg0i && (arg0iz = real_zerop (arg0i))))
10728 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10729 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10730 if (arg0rz && arg1i && real_zerop (arg1i))
10732 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10733 arg1r ? arg1r
10734 : build1 (REALPART_EXPR, rtype, arg1));
10735 tree ip = arg0i ? arg0i
10736 : build1 (IMAGPART_EXPR, rtype, arg0);
10737 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10739 else if (arg0iz && arg1r && real_zerop (arg1r))
10741 tree rp = arg0r ? arg0r
10742 : build1 (REALPART_EXPR, rtype, arg0);
10743 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10744 arg1i ? arg1i
10745 : build1 (IMAGPART_EXPR, rtype, arg1));
10746 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10751 /* Fold &x - &x. This can happen from &x.foo - &x.
10752 This is unsafe for certain floats even in non-IEEE formats.
10753 In IEEE, it is unsafe because it does wrong for NaNs.
10754 Also note that operand_equal_p is always false if an operand
10755 is volatile. */
10757 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10758 && operand_equal_p (arg0, arg1, 0))
10759 return build_zero_cst (type);
10761 /* A - B -> A + (-B) if B is easily negatable. */
10762 if (negate_expr_p (arg1)
10763 && ((FLOAT_TYPE_P (type)
10764 /* Avoid this transformation if B is a positive REAL_CST. */
10765 && (TREE_CODE (arg1) != REAL_CST
10766 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10767 || INTEGRAL_TYPE_P (type)))
10768 return fold_build2_loc (loc, PLUS_EXPR, type,
10769 fold_convert_loc (loc, type, arg0),
10770 fold_convert_loc (loc, type,
10771 negate_expr (arg1)));
10773 /* Try folding difference of addresses. */
10775 HOST_WIDE_INT diff;
10777 if ((TREE_CODE (arg0) == ADDR_EXPR
10778 || TREE_CODE (arg1) == ADDR_EXPR)
10779 && ptr_difference_const (arg0, arg1, &diff))
10780 return build_int_cst_type (type, diff);
10783 /* Fold &a[i] - &a[j] to i-j. */
10784 if (TREE_CODE (arg0) == ADDR_EXPR
10785 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10786 && TREE_CODE (arg1) == ADDR_EXPR
10787 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10789 tree tem = fold_addr_of_array_ref_difference (loc, type,
10790 TREE_OPERAND (arg0, 0),
10791 TREE_OPERAND (arg1, 0));
10792 if (tem)
10793 return tem;
10796 if (FLOAT_TYPE_P (type)
10797 && flag_unsafe_math_optimizations
10798 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10799 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10800 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10801 return tem;
10803 /* Handle (A1 * C1) - (A2 * C2) where A1 == A2 or C1 == C2, or where
10804 one of the factors is 1. Make sure the type is not saturating and has the signedness of
10805 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10806 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10807 if ((TREE_CODE (arg0) == MULT_EXPR
10808 || TREE_CODE (arg1) == MULT_EXPR)
10809 && !TYPE_SATURATING (type)
10810 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10811 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10812 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10814 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10815 if (tem)
10816 return tem;
10819 goto associate;
10821 case MULT_EXPR:
10822 /* (-A) * (-B) -> A * B */
10823 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10824 return fold_build2_loc (loc, MULT_EXPR, type,
10825 fold_convert_loc (loc, type,
10826 TREE_OPERAND (arg0, 0)),
10827 fold_convert_loc (loc, type,
10828 negate_expr (arg1)));
10829 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10830 return fold_build2_loc (loc, MULT_EXPR, type,
10831 fold_convert_loc (loc, type,
10832 negate_expr (arg0)),
10833 fold_convert_loc (loc, type,
10834 TREE_OPERAND (arg1, 0)));
10836 if (! FLOAT_TYPE_P (type))
10838 if (integer_zerop (arg1))
10839 return omit_one_operand_loc (loc, type, arg1, arg0);
10840 if (integer_onep (arg1))
10841 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10842 /* Transform x * -1 into -x. Make sure to do the negation
10843 on the original operand with conversions not stripped
10844 because we can only strip non-sign-changing conversions. */
10845 if (integer_all_onesp (arg1))
10846 return fold_convert_loc (loc, type, negate_expr (op0));
10847 /* Transform x * -C into -x * C if x is easily negatable. */
10848 if (TREE_CODE (arg1) == INTEGER_CST
10849 && tree_int_cst_sgn (arg1) == -1
10850 && negate_expr_p (arg0)
10851 && (tem = negate_expr (arg1)) != arg1
10852 && !TREE_OVERFLOW (tem))
10853 return fold_build2_loc (loc, MULT_EXPR, type,
10854 fold_convert_loc (loc, type,
10855 negate_expr (arg0)),
10856 tem);
10858 /* (a * (1 << b)) is (a << b) */
10859 if (TREE_CODE (arg1) == LSHIFT_EXPR
10860 && integer_onep (TREE_OPERAND (arg1, 0)))
10861 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10862 TREE_OPERAND (arg1, 1));
10863 if (TREE_CODE (arg0) == LSHIFT_EXPR
10864 && integer_onep (TREE_OPERAND (arg0, 0)))
10865 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10866 TREE_OPERAND (arg0, 1));
10868 /* (A + A) * C -> A * 2 * C */
10869 if (TREE_CODE (arg0) == PLUS_EXPR
10870 && TREE_CODE (arg1) == INTEGER_CST
10871 && operand_equal_p (TREE_OPERAND (arg0, 0),
10872 TREE_OPERAND (arg0, 1), 0))
10873 return fold_build2_loc (loc, MULT_EXPR, type,
10874 omit_one_operand_loc (loc, type,
10875 TREE_OPERAND (arg0, 0),
10876 TREE_OPERAND (arg0, 1)),
10877 fold_build2_loc (loc, MULT_EXPR, type,
10878 build_int_cst (type, 2), arg1));
10880 strict_overflow_p = false;
10881 if (TREE_CODE (arg1) == INTEGER_CST
10882 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10883 &strict_overflow_p)))
10885 if (strict_overflow_p)
10886 fold_overflow_warning (("assuming signed overflow does not "
10887 "occur when simplifying "
10888 "multiplication"),
10889 WARN_STRICT_OVERFLOW_MISC);
10890 return fold_convert_loc (loc, type, tem);
10893 /* Optimize z * conj(z) for integer complex numbers. */
10894 if (TREE_CODE (arg0) == CONJ_EXPR
10895 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10896 return fold_mult_zconjz (loc, type, arg1);
10897 if (TREE_CODE (arg1) == CONJ_EXPR
10898 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10899 return fold_mult_zconjz (loc, type, arg0);
10901 else
10903 /* Maybe fold x * 0 to 0. The expressions aren't the same
10904 when x is NaN, since x * 0 is also NaN. Nor are they the
10905 same in modes with signed zeros, since multiplying a
10906 negative value by 0 gives -0, not +0. */
10907 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10908 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10909 && real_zerop (arg1))
10910 return omit_one_operand_loc (loc, type, arg1, arg0);
10911 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10912 Likewise for complex arithmetic with signed zeros. */
10913 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10914 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10915 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10916 && real_onep (arg1))
10917 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10919 /* Transform x * -1.0 into -x. */
10920 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10921 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10922 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10923 && real_minus_onep (arg1))
10924 return fold_convert_loc (loc, type, negate_expr (arg0));
10926 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10927 the result for floating point types due to rounding so it is applied
10928 only if -fassociative-math was specified. */
10929 if (flag_associative_math
10930 && TREE_CODE (arg0) == RDIV_EXPR
10931 && TREE_CODE (arg1) == REAL_CST
10932 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10934 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10935 arg1);
10936 if (tem)
10937 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10938 TREE_OPERAND (arg0, 1));
10941 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10942 if (operand_equal_p (arg0, arg1, 0))
10944 tree tem = fold_strip_sign_ops (arg0);
10945 if (tem != NULL_TREE)
10947 tem = fold_convert_loc (loc, type, tem);
10948 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10952 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10953 This is not the same for NaNs or if signed zeros are
10954 involved. */
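 /* With z == a + b*I, z * I == -b + a*I: swap the two parts and
 negate the new real part, which is what the cases below build. */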
10955 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10956 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10957 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10958 && TREE_CODE (arg1) == COMPLEX_CST
10959 && real_zerop (TREE_REALPART (arg1)))
10961 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10962 if (real_onep (TREE_IMAGPART (arg1)))
10963 return
10964 fold_build2_loc (loc, COMPLEX_EXPR, type,
10965 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10966 rtype, arg0)),
10967 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10968 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10969 return
10970 fold_build2_loc (loc, COMPLEX_EXPR, type,
10971 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10972 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10973 rtype, arg0)));
10976 /* Optimize z * conj(z) for floating point complex numbers.
10977 Guarded by flag_unsafe_math_optimizations as non-finite
10978 imaginary components don't produce scalar results. */
10979 if (flag_unsafe_math_optimizations
10980 && TREE_CODE (arg0) == CONJ_EXPR
10981 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10982 return fold_mult_zconjz (loc, type, arg1);
10983 if (flag_unsafe_math_optimizations
10984 && TREE_CODE (arg1) == CONJ_EXPR
10985 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10986 return fold_mult_zconjz (loc, type, arg0);
10988 if (flag_unsafe_math_optimizations)
10990 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10991 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10993 /* Optimizations of root(...)*root(...). */
10994 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10996 tree rootfn, arg;
10997 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10998 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11000 /* Optimize sqrt(x)*sqrt(x) as x. */
11001 if (BUILTIN_SQRT_P (fcode0)
11002 && operand_equal_p (arg00, arg10, 0)
11003 && ! HONOR_SNANS (TYPE_MODE (type)))
11004 return arg00;
11006 /* Optimize root(x)*root(y) as root(x*y). */
11007 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11008 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11009 return build_call_expr_loc (loc, rootfn, 1, arg);
11012 /* Optimize expN(x)*expN(y) as expN(x+y). */
11013 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11015 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11016 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11017 CALL_EXPR_ARG (arg0, 0),
11018 CALL_EXPR_ARG (arg1, 0));
11019 return build_call_expr_loc (loc, expfn, 1, arg);
11022 /* Optimizations of pow(...)*pow(...). */
11023 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11024 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11025 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11027 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11028 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11029 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11030 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11032 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11033 if (operand_equal_p (arg01, arg11, 0))
11035 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11036 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11037 arg00, arg10);
11038 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11041 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11042 if (operand_equal_p (arg00, arg10, 0))
11044 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11045 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11046 arg01, arg11);
11047 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11051 /* Optimize tan(x)*cos(x) as sin(x). */
11052 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11053 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11054 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11055 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11056 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11057 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11058 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11059 CALL_EXPR_ARG (arg1, 0), 0))
11061 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11063 if (sinfn != NULL_TREE)
11064 return build_call_expr_loc (loc, sinfn, 1,
11065 CALL_EXPR_ARG (arg0, 0));
11068 /* Optimize x*pow(x,c) as pow(x,c+1). */
11069 if (fcode1 == BUILT_IN_POW
11070 || fcode1 == BUILT_IN_POWF
11071 || fcode1 == BUILT_IN_POWL)
11073 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11074 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11075 if (TREE_CODE (arg11) == REAL_CST
11076 && !TREE_OVERFLOW (arg11)
11077 && operand_equal_p (arg0, arg10, 0))
11079 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11080 REAL_VALUE_TYPE c;
11081 tree arg;
11083 c = TREE_REAL_CST (arg11);
11084 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11085 arg = build_real (type, c);
11086 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11090 /* Optimize pow(x,c)*x as pow(x,c+1). */
11091 if (fcode0 == BUILT_IN_POW
11092 || fcode0 == BUILT_IN_POWF
11093 || fcode0 == BUILT_IN_POWL)
11095 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11096 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11097 if (TREE_CODE (arg01) == REAL_CST
11098 && !TREE_OVERFLOW (arg01)
11099 && operand_equal_p (arg1, arg00, 0))
11101 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11102 REAL_VALUE_TYPE c;
11103 tree arg;
11105 c = TREE_REAL_CST (arg01);
11106 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11107 arg = build_real (type, c);
11108 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11112 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11113 if (!in_gimple_form
11114 && optimize
11115 && operand_equal_p (arg0, arg1, 0))
11117 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11119 if (powfn)
11121 tree arg = build_real (type, dconst2);
11122 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11127 goto associate;
11129 case BIT_IOR_EXPR:
11130 bit_ior:
11131 if (integer_all_onesp (arg1))
11132 return omit_one_operand_loc (loc, type, arg1, arg0);
11133 if (integer_zerop (arg1))
11134 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11135 if (operand_equal_p (arg0, arg1, 0))
11136 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11138 /* ~X | X is -1. */
11139 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11140 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11142 t1 = build_zero_cst (type);
11143 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11144 return omit_one_operand_loc (loc, type, t1, arg1);
11147 /* X | ~X is -1. */
11148 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11149 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11151 t1 = build_zero_cst (type);
11152 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11153 return omit_one_operand_loc (loc, type, t1, arg0);
11156 /* Canonicalize (X & C1) | C2. */
11157 if (TREE_CODE (arg0) == BIT_AND_EXPR
11158 && TREE_CODE (arg1) == INTEGER_CST
11159 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11161 double_int c1, c2, c3, msk;
11162 int width = TYPE_PRECISION (type), w;
11163 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11164 c2 = tree_to_double_int (arg1);
11166 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
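 /* "(X,C2)" denotes a COMPOUND_EXPR: X is retained only for its side
 effects and the value is C2, since every bit X & C1 could set is
 already set in C2. */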
11167 if ((c1 & c2) == c1)
11168 return omit_one_operand_loc (loc, type, arg1,
11169 TREE_OPERAND (arg0, 0));
11171 msk = double_int::mask (width);
11173 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11174 if (msk.and_not (c1 | c2).is_zero ())
11175 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11176 TREE_OPERAND (arg0, 0), arg1);
11178 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11179 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11180 mode which allows further optimizations. */
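 /* E.g. (X & 0x3F) | 0x0F becomes (X & 0x30) | 0x0F, while
 (X & 0xFF) | 0x0F keeps C1 == 0xFF, because 0xFF is QImode's mask
 and may enable a zero-extension pattern later. */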
11181 c1 &= msk;
11182 c2 &= msk;
11183 c3 = c1.and_not (c2);
11184 for (w = BITS_PER_UNIT;
11185 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11186 w <<= 1)
11188 unsigned HOST_WIDE_INT mask
11189 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11190 if (((c1.low | c2.low) & mask) == mask
11191 && (c1.low & ~mask) == 0 && c1.high == 0)
11193 c3 = double_int::from_uhwi (mask);
11194 break;
11197 if (c3 != c1)
11198 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11199 fold_build2_loc (loc, BIT_AND_EXPR, type,
11200 TREE_OPERAND (arg0, 0),
11201 double_int_to_tree (type,
11202 c3)),
11203 arg1);
11206 /* (X & Y) | Y is (X, Y). */
11207 if (TREE_CODE (arg0) == BIT_AND_EXPR
11208 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11209 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11210 /* (X & Y) | X is (Y, X). */
11211 if (TREE_CODE (arg0) == BIT_AND_EXPR
11212 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11213 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11214 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11215 /* X | (X & Y) is (Y, X). */
11216 if (TREE_CODE (arg1) == BIT_AND_EXPR
11217 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11218 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11219 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11220 /* X | (Y & X) is (Y, X). */
11221 if (TREE_CODE (arg1) == BIT_AND_EXPR
11222 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11223 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11224 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11226 /* (X & ~Y) | (~X & Y) is X ^ Y */
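 /* The two terms select the bits where exactly one of X and Y is
 set, so their union is the symmetric difference, i.e. X ^ Y.
 Both operand orders are checked below. */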
11227 if (TREE_CODE (arg0) == BIT_AND_EXPR
11228 && TREE_CODE (arg1) == BIT_AND_EXPR)
11230 tree a0, a1, l0, l1, n0, n1;
11232 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11233 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11235 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11236 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11238 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11239 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11241 if ((operand_equal_p (n0, a0, 0)
11242 && operand_equal_p (n1, a1, 0))
11243 || (operand_equal_p (n0, a1, 0)
11244 && operand_equal_p (n1, a0, 0)))
11245 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11248 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11249 if (t1 != NULL_TREE)
11250 return t1;
11252 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11254 This results in more efficient code for machines without a NAND
11255 instruction. Combine will canonicalize to the first form
11256 which will allow use of NAND instructions provided by the
11257 backend if they exist. */
11258 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11259 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11261 return
11262 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11263 build2 (BIT_AND_EXPR, type,
11264 fold_convert_loc (loc, type,
11265 TREE_OPERAND (arg0, 0)),
11266 fold_convert_loc (loc, type,
11267 TREE_OPERAND (arg1, 0))));
11270 /* See if this can be simplified into a rotate first. If that
11271 is unsuccessful continue in the association code. */
11272 goto bit_rotate;
11274 case BIT_XOR_EXPR:
11275 if (integer_zerop (arg1))
11276 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11277 if (integer_all_onesp (arg1))
11278 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11279 if (operand_equal_p (arg0, arg1, 0))
11280 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11282 /* ~X ^ X is -1. */
11283 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11284 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11286 t1 = build_zero_cst (type);
11287 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11288 return omit_one_operand_loc (loc, type, t1, arg1);
11291 /* X ^ ~X is -1. */
11292 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11293 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11295 t1 = build_zero_cst (type);
11296 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11297 return omit_one_operand_loc (loc, type, t1, arg0);
11300 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11301 with a constant, and the two constants have no bits in common,
11302 we should treat this as a BIT_IOR_EXPR since this may produce more
11303 simplifications. */
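 /* E.g. (X & 0x0F) ^ (Y & 0xF0): no bit position can see two set
 bits, so XOR and IOR coincide. */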
11304 if (TREE_CODE (arg0) == BIT_AND_EXPR
11305 && TREE_CODE (arg1) == BIT_AND_EXPR
11306 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11307 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11308 && integer_zerop (const_binop (BIT_AND_EXPR,
11309 TREE_OPERAND (arg0, 1),
11310 TREE_OPERAND (arg1, 1))))
11312 code = BIT_IOR_EXPR;
11313 goto bit_ior;
11316 /* (X | Y) ^ X -> Y & ~X. */
11317 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11318 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11320 tree t2 = TREE_OPERAND (arg0, 1);
11321 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11322 arg1);
11323 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11324 fold_convert_loc (loc, type, t2),
11325 fold_convert_loc (loc, type, t1));
11326 return t1;
11329 /* (Y | X) ^ X -> Y & ~X. */
11330 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11331 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11333 tree t2 = TREE_OPERAND (arg0, 0);
11334 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11335 arg1);
11336 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11337 fold_convert_loc (loc, type, t2),
11338 fold_convert_loc (loc, type, t1));
11339 return t1;
11342 /* X ^ (X | Y) -> Y & ~X. */
11343 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11344 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11346 tree t2 = TREE_OPERAND (arg1, 1);
11347 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11348 arg0);
11349 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11350 fold_convert_loc (loc, type, t2),
11351 fold_convert_loc (loc, type, t1));
11352 return t1;
11355 /* X ^ (Y | X) -> Y & ~X. */
11356 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11357 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11359 tree t2 = TREE_OPERAND (arg1, 0);
11360 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11361 arg0);
11362 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11363 fold_convert_loc (loc, type, t2),
11364 fold_convert_loc (loc, type, t1));
11365 return t1;
11368 /* Convert ~X ^ ~Y to X ^ Y. */
11369 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11370 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11371 return fold_build2_loc (loc, code, type,
11372 fold_convert_loc (loc, type,
11373 TREE_OPERAND (arg0, 0)),
11374 fold_convert_loc (loc, type,
11375 TREE_OPERAND (arg1, 0)));
11377 /* Convert ~X ^ C to X ^ ~C. */
11378 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11379 && TREE_CODE (arg1) == INTEGER_CST)
11380 return fold_build2_loc (loc, code, type,
11381 fold_convert_loc (loc, type,
11382 TREE_OPERAND (arg0, 0)),
11383 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11385 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11386 if (TREE_CODE (arg0) == BIT_AND_EXPR
11387 && integer_onep (TREE_OPERAND (arg0, 1))
11388 && integer_onep (arg1))
11389 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11390 build_zero_cst (TREE_TYPE (arg0)));
11392 /* Fold (X & Y) ^ Y as ~X & Y. */
11393 if (TREE_CODE (arg0) == BIT_AND_EXPR
11394 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11396 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11397 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11398 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11399 fold_convert_loc (loc, type, arg1));
11401 /* Fold (X & Y) ^ X as ~Y & X. */
11402 if (TREE_CODE (arg0) == BIT_AND_EXPR
11403 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11404 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11406 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11407 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11408 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11409 fold_convert_loc (loc, type, arg1));
11411 /* Fold X ^ (X & Y) as X & ~Y. */
11412 if (TREE_CODE (arg1) == BIT_AND_EXPR
11413 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11415 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11416 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11417 fold_convert_loc (loc, type, arg0),
11418 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11420 /* Fold X ^ (Y & X) as ~Y & X. */
11421 if (TREE_CODE (arg1) == BIT_AND_EXPR
11422 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11423 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11425 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11426 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11427 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11428 fold_convert_loc (loc, type, arg0));
11431 /* See if this can be simplified into a rotate first. If that
11432 is unsuccessful continue in the association code. */
11433 goto bit_rotate;
11435 case BIT_AND_EXPR:
11436 if (integer_all_onesp (arg1))
11437 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11438 if (integer_zerop (arg1))
11439 return omit_one_operand_loc (loc, type, arg1, arg0);
11440 if (operand_equal_p (arg0, arg1, 0))
11441 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11443 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11444 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11445 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11446 || (TREE_CODE (arg0) == EQ_EXPR
11447 && integer_zerop (TREE_OPERAND (arg0, 1))))
11448 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11449 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11451 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11452 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11453 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11454 || (TREE_CODE (arg1) == EQ_EXPR
11455 && integer_zerop (TREE_OPERAND (arg1, 1))))
11456 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11457 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11459 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11460 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11461 && TREE_CODE (arg1) == INTEGER_CST
11462 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11464 tree tmp1 = fold_convert_loc (loc, type, arg1);
11465 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11466 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11467 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11468 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11469 return
11470 fold_convert_loc (loc, type,
11471 fold_build2_loc (loc, BIT_IOR_EXPR,
11472 type, tmp2, tmp3));
11475 /* (X | Y) & Y is (X, Y). */
11476 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11477 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11478 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11479 /* (X | Y) & X is (Y, X). */
11480 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11481 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11482 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11483 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11484 /* X & (X | Y) is (Y, X). */
11485 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11486 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11487 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11488 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11489 /* X & (Y | X) is (Y, X). */
11490 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11491 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11492 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11493 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11495 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11496 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11497 && integer_onep (TREE_OPERAND (arg0, 1))
11498 && integer_onep (arg1))
11500 tree tem2;
11501 tem = TREE_OPERAND (arg0, 0);
11502 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11503 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11504 tem, tem2);
11505 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11506 build_zero_cst (TREE_TYPE (tem)));
11508 /* Fold ~X & 1 as (X & 1) == 0. */
11509 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11510 && integer_onep (arg1))
11512 tree tem2;
11513 tem = TREE_OPERAND (arg0, 0);
11514 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11515 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11516 tem, tem2);
11517 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11518 build_zero_cst (TREE_TYPE (tem)));
11520 /* Fold !X & 1 as X == 0. */
11521 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11522 && integer_onep (arg1))
11524 tem = TREE_OPERAND (arg0, 0);
11525 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11526 build_zero_cst (TREE_TYPE (tem)));
11529 /* Fold (X ^ Y) & Y as ~X & Y. */
11530 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11531 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11533 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11534 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11535 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11536 fold_convert_loc (loc, type, arg1));
11538 /* Fold (X ^ Y) & X as ~Y & X. */
11539 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11540 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11541 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11543 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11544 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11545 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11546 fold_convert_loc (loc, type, arg1));
11548 /* Fold X & (X ^ Y) as X & ~Y. */
11549 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11550 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11552 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11553 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11554 fold_convert_loc (loc, type, arg0),
11555 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11557 /* Fold X & (Y ^ X) as ~Y & X. */
11558 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11559 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11560 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11562 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11563 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11564 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11565 fold_convert_loc (loc, type, arg0));
11568 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11569 multiple of 1 << CST. */
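 /* E.g. (X * 12) & -4: 12 is a multiple of 4 == 1 << 2, so the low
 two bits of the product are already zero and the mask is
 redundant. */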
11570 if (TREE_CODE (arg1) == INTEGER_CST)
11572 double_int cst1 = tree_to_double_int (arg1);
11573 double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11574 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11575 if ((cst1 & ncst1) == ncst1
11576 && multiple_of_p (type, arg0,
11577 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11578 return fold_convert_loc (loc, type, arg0);
11581 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11582 bits from CST2. */
11583 if (TREE_CODE (arg1) == INTEGER_CST
11584 && TREE_CODE (arg0) == MULT_EXPR
11585 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11587 int arg1tz
11588 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11589 if (arg1tz > 0)
11591 double_int arg1mask, masked;
11592 arg1mask = ~double_int::mask (arg1tz);
11593 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11594 TYPE_UNSIGNED (type));
11595 masked = arg1mask & tree_to_double_int (arg1);
11596 if (masked.is_zero ())
11597 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11598 arg0, arg1);
11599 else if (masked != tree_to_double_int (arg1))
11600 return fold_build2_loc (loc, code, type, op0,
11601 double_int_to_tree (type, masked));
11605 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11606 ((A & N) + B) & M -> (A + B) & M
11607 Similarly if (N & M) == 0,
11608 ((A | N) + B) & M -> (A + B) & M
11609 and for - instead of + (or unary - instead of +)
11610 and/or ^ instead of |.
11611 If B is constant and (B & M) == 0, fold into A & M. */
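 /* E.g. ((A & 0xFF) + B) & 0x0F -> (A + B) & 0x0F: under the mask
 (1 << cst) - 1 an addition depends only on the low bits of its
 operands, because carries propagate upward only. */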
11612 if (host_integerp (arg1, 1))
11614 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11615 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11616 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11617 && (TREE_CODE (arg0) == PLUS_EXPR
11618 || TREE_CODE (arg0) == MINUS_EXPR
11619 || TREE_CODE (arg0) == NEGATE_EXPR)
11620 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11621 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11623 tree pmop[2];
11624 int which = 0;
11625 unsigned HOST_WIDE_INT cst0;
11627 /* Now we know that arg0 is (C + D) or (C - D) or
11628 -C and arg1 (M) is == (1LL << cst) - 1.
11629 Store C into PMOP[0] and D into PMOP[1]. */
11630 pmop[0] = TREE_OPERAND (arg0, 0);
11631 pmop[1] = NULL;
11632 if (TREE_CODE (arg0) != NEGATE_EXPR)
11634 pmop[1] = TREE_OPERAND (arg0, 1);
11635 which = 1;
11638 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11639 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11640 & cst1) != cst1)
11641 which = -1;
11643 for (; which >= 0; which--)
11644 switch (TREE_CODE (pmop[which]))
11646 case BIT_AND_EXPR:
11647 case BIT_IOR_EXPR:
11648 case BIT_XOR_EXPR:
11649 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11650 != INTEGER_CST)
11651 break;
11652 /* tree_low_cst not used, because we don't care about
11653 the upper bits. */
11654 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11655 cst0 &= cst1;
11656 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11658 if (cst0 != cst1)
11659 break;
11661 else if (cst0 != 0)
11662 break;
11663 /* If C or D is of the form (A & N) where
11664 (N & M) == M, or of the form (A | N) or
11665 (A ^ N) where (N & M) == 0, replace it with A. */
11666 pmop[which] = TREE_OPERAND (pmop[which], 0);
11667 break;
11668 case INTEGER_CST:
11669 /* If C or D is a N where (N & M) == 0, it can be
11670 omitted (assumed 0). */
11671 if ((TREE_CODE (arg0) == PLUS_EXPR
11672 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11673 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11674 pmop[which] = NULL;
11675 break;
11676 default:
11677 break;
11680 /* Only build anything new if we optimized one or both arguments
11681 above. */
11682 if (pmop[0] != TREE_OPERAND (arg0, 0)
11683 || (TREE_CODE (arg0) != NEGATE_EXPR
11684 && pmop[1] != TREE_OPERAND (arg0, 1)))
11686 tree utype = TREE_TYPE (arg0);
11687 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11689 /* Perform the operations in a type that has defined
11690 overflow behavior. */
11691 utype = unsigned_type_for (TREE_TYPE (arg0));
11692 if (pmop[0] != NULL)
11693 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11694 if (pmop[1] != NULL)
11695 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11698 if (TREE_CODE (arg0) == NEGATE_EXPR)
11699 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11700 else if (TREE_CODE (arg0) == PLUS_EXPR)
11702 if (pmop[0] != NULL && pmop[1] != NULL)
11703 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11704 pmop[0], pmop[1]);
11705 else if (pmop[0] != NULL)
11706 tem = pmop[0];
11707 else if (pmop[1] != NULL)
11708 tem = pmop[1];
11709 else
11710 return build_int_cst (type, 0);
11712 else if (pmop[0] == NULL)
11713 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11714 else
11715 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11716 pmop[0], pmop[1]);
11717 /* TEM is now the new binary +, - or unary - replacement. */
11718 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11719 fold_convert_loc (loc, utype, arg1));
11720 return fold_convert_loc (loc, type, tem);
11725 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11726 if (t1 != NULL_TREE)
11727 return t1;
11728 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11729 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11730 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11732 unsigned int prec
11733 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11735 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11736 && (~TREE_INT_CST_LOW (arg1)
11737 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11738 return
11739 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11742 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11744 This results in more efficient code for machines without a NOR
11745 instruction. Combine will canonicalize to the first form
11746 which will allow use of NOR instructions provided by the
11747 backend if they exist. */
11748 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11749 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11751 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11752 build2 (BIT_IOR_EXPR, type,
11753 fold_convert_loc (loc, type,
11754 TREE_OPERAND (arg0, 0)),
11755 fold_convert_loc (loc, type,
11756 TREE_OPERAND (arg1, 0))));
11759 /* If arg0 is derived from the address of an object or function, we may
11760 be able to fold this expression using the object or function's
11761 alignment. */
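 /* E.g. if ARG0 is known to be the address of an 8-byte-aligned
 object, then modulus == 8 and residue == 0, so ARG0 & 7 folds
 to 0. */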
11762 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11764 unsigned HOST_WIDE_INT modulus, residue;
11765 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11767 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11768 integer_onep (arg1));
11770 /* This works because modulus is a power of 2. If this weren't the
11771 case, we'd have to replace it by its greatest power-of-2
11772 divisor: modulus & -modulus. */
11773 if (low < modulus)
11774 return build_int_cst (type, residue & low);
11777 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11778 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11779 if the new mask might be further optimized. */
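 /* E.g. (X << 8) & 0xFF00: the low eight bits of the shift are zero
 anyway, so the mask may be widened to 0xFFFF, which is HImode's
 mask and can combine into a zero-extension. */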
11780 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11781 || TREE_CODE (arg0) == RSHIFT_EXPR)
11782 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11783 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11784 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11785 < TYPE_PRECISION (TREE_TYPE (arg0))
11786 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11787 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11789 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11790 unsigned HOST_WIDE_INT mask
11791 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11792 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11793 tree shift_type = TREE_TYPE (arg0);
11795 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11796 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11797 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11798 && TYPE_PRECISION (TREE_TYPE (arg0))
11799 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11801 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11802 tree arg00 = TREE_OPERAND (arg0, 0);
11803 /* See if more bits can be proven as zero because of
11804 zero extension. */
11805 if (TREE_CODE (arg00) == NOP_EXPR
11806 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11808 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11809 if (TYPE_PRECISION (inner_type)
11810 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11811 && TYPE_PRECISION (inner_type) < prec)
11813 prec = TYPE_PRECISION (inner_type);
11814 /* See if we can shorten the right shift. */
11815 if (shiftc < prec)
11816 shift_type = inner_type;
11819 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11820 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11821 zerobits <<= prec - shiftc;
11822 /* For arithmetic shift if sign bit could be set, zerobits
11823 can contain actually sign bits, so no transformation is
11824 possible, unless MASK masks them all away. In that
11825 case the shift needs to be converted into logical shift. */
11826 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11827 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11829 if ((mask & zerobits) == 0)
11830 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11831 else
11832 zerobits = 0;
11836 /* ((X << 16) & 0xff00) is (X, 0). */
11837 if ((mask & zerobits) == mask)
11838 return omit_one_operand_loc (loc, type,
11839 build_int_cst (type, 0), arg0);
11841 newmask = mask | zerobits;
11842 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11844 unsigned int prec;
11846 /* Only do the transformation if NEWMASK is some integer
11847 mode's mask. */
11848 for (prec = BITS_PER_UNIT;
11849 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11850 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11851 break;
11852 if (prec < HOST_BITS_PER_WIDE_INT
11853 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11855 tree newmaskt;
11857 if (shift_type != TREE_TYPE (arg0))
11859 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11860 fold_convert_loc (loc, shift_type,
11861 TREE_OPERAND (arg0, 0)),
11862 TREE_OPERAND (arg0, 1));
11863 tem = fold_convert_loc (loc, type, tem);
11865 else
11866 tem = op0;
11867 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11868 if (!tree_int_cst_equal (newmaskt, arg1))
11869 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11874 goto associate;
11876 case RDIV_EXPR:
11877 /* Don't touch a floating-point divide by zero unless the mode
11878 of the constant can represent infinity. */
11879 if (TREE_CODE (arg1) == REAL_CST
11880 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11881 && real_zerop (arg1))
11882 return NULL_TREE;
11884 /* Optimize A / A to 1.0 if we don't care about
11885 NaNs or Infinities. Skip the transformation
11886 for non-real operands. */
11887 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11888 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11889 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11890 && operand_equal_p (arg0, arg1, 0))
11892 tree r = build_real (TREE_TYPE (arg0), dconst1);
11894 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11897 /* The complex version of the above A / A optimization. */
11898 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11899 && operand_equal_p (arg0, arg1, 0))
11901 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11902 if (! HONOR_NANS (TYPE_MODE (elem_type))
11903 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11905 tree r = build_real (elem_type, dconst1);
11906 /* omit_two_operands will call fold_convert for us. */
11907 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11911 /* (-A) / (-B) -> A / B */
11912 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11913 return fold_build2_loc (loc, RDIV_EXPR, type,
11914 TREE_OPERAND (arg0, 0),
11915 negate_expr (arg1));
11916 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11917 return fold_build2_loc (loc, RDIV_EXPR, type,
11918 negate_expr (arg0),
11919 TREE_OPERAND (arg1, 0));
11921 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11922 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11923 && real_onep (arg1))
11924 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11926 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11927 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11928 && real_minus_onep (arg1))
11929 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11930 negate_expr (arg0)));
11932 /* If ARG1 is a constant, we can convert this to a multiply by the
11933 reciprocal. This does not have the same rounding properties,
11934 so only do this if -freciprocal-math. We can actually
11935 always safely do it if ARG1 is a power of two, but it's hard to
11936 tell if it is or not in a portable manner. */
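 /* E.g. X / 4.0 becomes X * 0.25 with no change in rounding, while
 X / 3.0 -> X * (1.0/3.0) can differ in the last bit, hence the
 flag. */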
11937 if (optimize
11938 && (TREE_CODE (arg1) == REAL_CST
11939 || (TREE_CODE (arg1) == COMPLEX_CST
11940 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11941 || (TREE_CODE (arg1) == VECTOR_CST
11942 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11944 if (flag_reciprocal_math
11945 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11946 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11947 /* Find the reciprocal if optimizing and the result is exact.
11948 TODO: Complex reciprocal not implemented. */
11949 if (TREE_CODE (arg1) != COMPLEX_CST)
11951 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11953 if (inverse)
11954 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11957 /* Convert A/B/C to A/(B*C). */
11958 if (flag_reciprocal_math
11959 && TREE_CODE (arg0) == RDIV_EXPR)
11960 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11961 fold_build2_loc (loc, MULT_EXPR, type,
11962 TREE_OPERAND (arg0, 1), arg1));
11964 /* Convert A/(B/C) to (A/B)*C. */
11965 if (flag_reciprocal_math
11966 && TREE_CODE (arg1) == RDIV_EXPR)
11967 return fold_build2_loc (loc, MULT_EXPR, type,
11968 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11969 TREE_OPERAND (arg1, 0)),
11970 TREE_OPERAND (arg1, 1));
11972 /* Convert C1/(X*C2) into (C1/C2)/X. */
11973 if (flag_reciprocal_math
11974 && TREE_CODE (arg1) == MULT_EXPR
11975 && TREE_CODE (arg0) == REAL_CST
11976 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11978 tree tem = const_binop (RDIV_EXPR, arg0,
11979 TREE_OPERAND (arg1, 1));
11980 if (tem)
11981 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11982 TREE_OPERAND (arg1, 0));
11985 if (flag_unsafe_math_optimizations)
11987 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11988 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11990 /* Optimize sin(x)/cos(x) as tan(x). */
11991 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11992 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11993 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11994 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11995 CALL_EXPR_ARG (arg1, 0), 0))
11997 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11999 if (tanfn != NULL_TREE)
12000 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12003 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12004 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12005 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12006 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12007 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12008 CALL_EXPR_ARG (arg1, 0), 0))
12010 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12012 if (tanfn != NULL_TREE)
12014 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12015 CALL_EXPR_ARG (arg0, 0));
12016 return fold_build2_loc (loc, RDIV_EXPR, type,
12017 build_real (type, dconst1), tmp);
12021 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12022 NaNs or Infinities. */
12023 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12024 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12025 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12027 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12028 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12030 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12031 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12032 && operand_equal_p (arg00, arg01, 0))
12034 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12036 if (cosfn != NULL_TREE)
12037 return build_call_expr_loc (loc, cosfn, 1, arg00);
12041 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12042 NaNs or Infinities. */
12043 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12044 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12045 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12047 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12048 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12050 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12051 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12052 && operand_equal_p (arg00, arg01, 0))
12054 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12056 if (cosfn != NULL_TREE)
12058 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12059 return fold_build2_loc (loc, RDIV_EXPR, type,
12060 build_real (type, dconst1),
12061 tmp);
12066 /* Optimize pow(x,c)/x as pow(x,c-1). */
12067 if (fcode0 == BUILT_IN_POW
12068 || fcode0 == BUILT_IN_POWF
12069 || fcode0 == BUILT_IN_POWL)
12071 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12072 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12073 if (TREE_CODE (arg01) == REAL_CST
12074 && !TREE_OVERFLOW (arg01)
12075 && operand_equal_p (arg1, arg00, 0))
12077 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12078 REAL_VALUE_TYPE c;
12079 tree arg;
12081 c = TREE_REAL_CST (arg01);
12082 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12083 arg = build_real (type, c);
12084 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12088 /* Optimize a/root(b/c) into a*root(c/b). */
12089 if (BUILTIN_ROOT_P (fcode1))
12091 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12093 if (TREE_CODE (rootarg) == RDIV_EXPR)
12095 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12096 tree b = TREE_OPERAND (rootarg, 0);
12097 tree c = TREE_OPERAND (rootarg, 1);
12099 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12101 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12102 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12106 /* Optimize x/expN(y) into x*expN(-y). */
12107 if (BUILTIN_EXPONENT_P (fcode1))
12109 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12110 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12111 arg1 = build_call_expr_loc (loc,
12112 expfn, 1,
12113 fold_convert_loc (loc, type, arg));
12114 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12117 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12118 if (fcode1 == BUILT_IN_POW
12119 || fcode1 == BUILT_IN_POWF
12120 || fcode1 == BUILT_IN_POWL)
12122 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12123 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12124 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12125 tree neg11 = fold_convert_loc (loc, type,
12126 negate_expr (arg11));
12127 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12128 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12131 return NULL_TREE;
12133 case TRUNC_DIV_EXPR:
12134 /* Optimize (X & (-A)) / A where A is a power of 2,
12135 to X >> log2(A). */
12136 if (TREE_CODE (arg0) == BIT_AND_EXPR
12137 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12138 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12140 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12141 arg1, TREE_OPERAND (arg0, 1));
12142 if (sum && integer_zerop (sum)) {
12143 unsigned long pow2;
12145 if (TREE_INT_CST_LOW (arg1))
12146 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12147 else
12148 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12149 + HOST_BITS_PER_WIDE_INT;
12151 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12152 TREE_OPERAND (arg0, 0),
12153 build_int_cst (integer_type_node, pow2));
12157 /* Fall through */
12159 case FLOOR_DIV_EXPR:
12160 /* Simplify A / (B << N) where A and B are positive and B is
12161 a power of 2, to A >> (N + log2(B)). */
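 /* E.g. A / (4 << N) becomes A >> (N + 2) when A is known to be
 nonnegative. */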
12162 strict_overflow_p = false;
12163 if (TREE_CODE (arg1) == LSHIFT_EXPR
12164 && (TYPE_UNSIGNED (type)
12165 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12167 tree sval = TREE_OPERAND (arg1, 0);
12168 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12170 tree sh_cnt = TREE_OPERAND (arg1, 1);
12171 unsigned long pow2;
12173 if (TREE_INT_CST_LOW (sval))
12174 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12175 else
12176 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12177 + HOST_BITS_PER_WIDE_INT;
12179 if (strict_overflow_p)
12180 fold_overflow_warning (("assuming signed overflow does not "
12181 "occur when simplifying A / (B << N)"),
12182 WARN_STRICT_OVERFLOW_MISC);
12184 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12185 sh_cnt,
12186 build_int_cst (TREE_TYPE (sh_cnt),
12187 pow2));
12188 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12189 fold_convert_loc (loc, type, arg0), sh_cnt);
12193 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12194 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12195 if (INTEGRAL_TYPE_P (type)
12196 && TYPE_UNSIGNED (type)
12197 && code == FLOOR_DIV_EXPR)
12198 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12200 /* Fall through */
12202 case ROUND_DIV_EXPR:
12203 case CEIL_DIV_EXPR:
12204 case EXACT_DIV_EXPR:
12205 if (integer_onep (arg1))
12206 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12207 if (integer_zerop (arg1))
12208 return NULL_TREE;
12209 /* X / -1 is -X. */
12210 if (!TYPE_UNSIGNED (type)
12211 && TREE_CODE (arg1) == INTEGER_CST
12212 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12213 && TREE_INT_CST_HIGH (arg1) == -1)
12214 return fold_convert_loc (loc, type, negate_expr (arg0));
12216 /* Convert -A / -B to A / B when the type is signed and overflow is
12217 undefined. */
12218 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12219 && TREE_CODE (arg0) == NEGATE_EXPR
12220 && negate_expr_p (arg1))
12222 if (INTEGRAL_TYPE_P (type))
12223 fold_overflow_warning (("assuming signed overflow does not occur "
12224 "when distributing negation across "
12225 "division"),
12226 WARN_STRICT_OVERFLOW_MISC);
12227 return fold_build2_loc (loc, code, type,
12228 fold_convert_loc (loc, type,
12229 TREE_OPERAND (arg0, 0)),
12230 fold_convert_loc (loc, type,
12231 negate_expr (arg1)));
12233 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12234 && TREE_CODE (arg1) == NEGATE_EXPR
12235 && negate_expr_p (arg0))
12237 if (INTEGRAL_TYPE_P (type))
12238 fold_overflow_warning (("assuming signed overflow does not occur "
12239 "when distributing negation across "
12240 "division"),
12241 WARN_STRICT_OVERFLOW_MISC);
12242 return fold_build2_loc (loc, code, type,
12243 fold_convert_loc (loc, type,
12244 negate_expr (arg0)),
12245 fold_convert_loc (loc, type,
12246 TREE_OPERAND (arg1, 0)));
12249 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12250 operation, EXACT_DIV_EXPR.
12252 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12253 At one time others generated faster code, but it's not clear if they do
12254 after the last round of changes to the DIV code in expmed.c. */
12255 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12256 && multiple_of_p (type, arg0, arg1))
12257 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12259 strict_overflow_p = false;
12260 if (TREE_CODE (arg1) == INTEGER_CST
12261 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12262 &strict_overflow_p)))
12264 if (strict_overflow_p)
12265 fold_overflow_warning (("assuming signed overflow does not occur "
12266 "when simplifying division"),
12267 WARN_STRICT_OVERFLOW_MISC);
12268 return fold_convert_loc (loc, type, tem);
12271 return NULL_TREE;
12273 case CEIL_MOD_EXPR:
12274 case FLOOR_MOD_EXPR:
12275 case ROUND_MOD_EXPR:
12276 case TRUNC_MOD_EXPR:
12277 /* X % 1 is always zero, but be sure to preserve any side
12278 effects in X. */
12279 if (integer_onep (arg1))
12280 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12282 /* X % 0, return X % 0 unchanged so that we can get the
12283 proper warnings and errors. */
12284 if (integer_zerop (arg1))
12285 return NULL_TREE;
12287 /* 0 % X is always zero, but be sure to preserve any side
12288 effects in X. Place this after checking for X == 0. */
12289 if (integer_zerop (arg0))
12290 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12292 /* X % -1 is zero. */
12293 if (!TYPE_UNSIGNED (type)
12294 && TREE_CODE (arg1) == INTEGER_CST
12295 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12296 && TREE_INT_CST_HIGH (arg1) == -1)
12297 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12299 /* X % -C is the same as X % C. */
12300 if (code == TRUNC_MOD_EXPR
12301 && !TYPE_UNSIGNED (type)
12302 && TREE_CODE (arg1) == INTEGER_CST
12303 && !TREE_OVERFLOW (arg1)
12304 && TREE_INT_CST_HIGH (arg1) < 0
12305 && !TYPE_OVERFLOW_TRAPS (type)
12306 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12307 && !sign_bit_p (arg1, arg1))
12308 return fold_build2_loc (loc, code, type,
12309 fold_convert_loc (loc, type, arg0),
12310 fold_convert_loc (loc, type,
12311 negate_expr (arg1)));
12313 /* X % -Y is the same as X % Y. */
12314 if (code == TRUNC_MOD_EXPR
12315 && !TYPE_UNSIGNED (type)
12316 && TREE_CODE (arg1) == NEGATE_EXPR
12317 && !TYPE_OVERFLOW_TRAPS (type))
12318 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12319 fold_convert_loc (loc, type,
12320 TREE_OPERAND (arg1, 0)));
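/* Worked example (editorial note, not from the sources): "x % -5"
   folds to "x % 5" and "x % -y" to "x % y" for signed x, because the
   sign of a truncating modulus follows the dividend, not the divisor.
   The constant case is skipped for INT_MIN, whose negation would
   overflow.  */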
12322 strict_overflow_p = false;
12323 if (TREE_CODE (arg1) == INTEGER_CST
12324 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12325 &strict_overflow_p)))
12327 if (strict_overflow_p)
12328 fold_overflow_warning (("assuming signed overflow does not occur "
12329 "when simplifying modulus"),
12330 WARN_STRICT_OVERFLOW_MISC);
12331 return fold_convert_loc (loc, type, tem);
12334 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12335 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12336 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12337 && (TYPE_UNSIGNED (type)
12338 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12340 tree c = arg1;
12341 /* Also optimize A % (C << N) where C is a power of 2,
12342 to A & ((C << N) - 1). */
12343 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12344 c = TREE_OPERAND (arg1, 0);
12346 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12348 tree mask
12349 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12350 build_int_cst (TREE_TYPE (arg1), 1));
12351 if (strict_overflow_p)
12352 fold_overflow_warning (("assuming signed overflow does not "
12353 "occur when simplifying "
12354 "X % (power of two)"),
12355 WARN_STRICT_OVERFLOW_MISC);
12356 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12357 fold_convert_loc (loc, type, arg0),
12358 fold_convert_loc (loc, type, mask));
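/* Worked example (editorial note, not from the sources): for unsigned
   x, "x % 8" folds to "x & 7", and "x % (4u << n)" folds to
   "x & ((4u << n) - 1)"; in the shifted form the inner constant 4
   supplies the power-of-two test while the whole divisor builds the
   mask.  */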
12362 return NULL_TREE;
12364 case LROTATE_EXPR:
12365 case RROTATE_EXPR:
12366 if (integer_all_onesp (arg0))
12367 return omit_one_operand_loc (loc, type, arg0, arg1);
12368 goto shift;
12370 case RSHIFT_EXPR:
12371 /* Optimize -1 >> x for arithmetic right shifts. */
12372 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12373 && tree_expr_nonnegative_p (arg1))
12374 return omit_one_operand_loc (loc, type, arg0, arg1);
12375 /* ... fall through ... */
12377 case LSHIFT_EXPR:
12378 shift:
12379 if (integer_zerop (arg1))
12380 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12381 if (integer_zerop (arg0))
12382 return omit_one_operand_loc (loc, type, arg0, arg1);
12384 /* Since a negative shift count is not well-defined,
12385 don't try to compute it in the compiler. */
12386 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12387 return NULL_TREE;
12389 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12390 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12391 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12392 && host_integerp (TREE_OPERAND (arg0, 1), false)
12393 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12395 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12396 + TREE_INT_CST_LOW (arg1));
12398 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12399 being well defined. */
12400 if (low >= TYPE_PRECISION (type))
12402 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12403 low = low % TYPE_PRECISION (type);
12404 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12405 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12406 TREE_OPERAND (arg0, 0));
12407 else
12408 low = TYPE_PRECISION (type) - 1;
12411 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12412 build_int_cst (type, low));
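/* Worked example (editorial note, not from the sources): on a 32-bit
   type, "(x << 3) << 5" folds to "x << 8" since 3 + 5 < 32, while
   "(x << 20) << 20" folds to 0 because the combined count reaches the
   precision; rotate counts are instead reduced modulo 32, and a
   signed right-shift count is clamped to 31.  */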
12415 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12416 into x & ((unsigned)-1 >> c) for unsigned types. */
12417 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12418 || (TYPE_UNSIGNED (type)
12419 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12420 && host_integerp (arg1, false)
12421 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12422 && host_integerp (TREE_OPERAND (arg0, 1), false)
12423 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12425 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12426 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12427 tree lshift;
12428 tree arg00;
12430 if (low0 == low1)
12432 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12434 lshift = build_int_cst (type, -1);
12435 lshift = int_const_binop (code, lshift, arg1);
12437 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
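/* Worked example (editorial note, not from the sources): for 32-bit
   x, "(x >> 4) << 4" folds to "x & 0xfffffff0", and for unsigned x
   "(x << 4) >> 4" folds to "x & 0x0fffffff"; the mask comes from
   shifting an all-ones constant by the common count.  */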
12441 /* Rewrite an LROTATE_EXPR by a constant into an
12442 RROTATE_EXPR by a new constant. */
12443 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12445 tree tem = build_int_cst (TREE_TYPE (arg1),
12446 TYPE_PRECISION (type));
12447 tem = const_binop (MINUS_EXPR, tem, arg1);
12448 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12451 /* If we have a rotate of a bit operation with the rotate count and
12452 the second operand of the bit operation both constant,
12453 permute the two operations. */
12454 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12455 && (TREE_CODE (arg0) == BIT_AND_EXPR
12456 || TREE_CODE (arg0) == BIT_IOR_EXPR
12457 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12458 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12459 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12460 fold_build2_loc (loc, code, type,
12461 TREE_OPERAND (arg0, 0), arg1),
12462 fold_build2_loc (loc, code, type,
12463 TREE_OPERAND (arg0, 1), arg1));
12465 /* Two consecutive rotates adding up to the precision of the
12466 type can be ignored. */
12467 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12468 && TREE_CODE (arg0) == RROTATE_EXPR
12469 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12470 && TREE_INT_CST_HIGH (arg1) == 0
12471 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12472 && ((TREE_INT_CST_LOW (arg1)
12473 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12474 == (unsigned int) TYPE_PRECISION (type)))
12475 return TREE_OPERAND (arg0, 0);
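/* Worked example (editorial note, not from the sources): on a 32-bit
   type a rotate-left by 5 becomes a rotate-right by 27, and two
   right-rotates by 13 and then 19 cancel entirely, since
   13 + 19 == 32.  */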
12477 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12478 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12479 if the latter can be further optimized. */
12480 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12481 && TREE_CODE (arg0) == BIT_AND_EXPR
12482 && TREE_CODE (arg1) == INTEGER_CST
12483 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12485 tree mask = fold_build2_loc (loc, code, type,
12486 fold_convert_loc (loc, type,
12487 TREE_OPERAND (arg0, 1)),
12488 arg1);
12489 tree shift = fold_build2_loc (loc, code, type,
12490 fold_convert_loc (loc, type,
12491 TREE_OPERAND (arg0, 0)),
12492 arg1);
12493 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12494 if (tem)
12495 return tem;
12498 return NULL_TREE;
12500 case MIN_EXPR:
12501 if (operand_equal_p (arg0, arg1, 0))
12502 return omit_one_operand_loc (loc, type, arg0, arg1);
12503 if (INTEGRAL_TYPE_P (type)
12504 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12505 return omit_one_operand_loc (loc, type, arg1, arg0);
12506 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12507 if (tem)
12508 return tem;
12509 goto associate;
12511 case MAX_EXPR:
12512 if (operand_equal_p (arg0, arg1, 0))
12513 return omit_one_operand_loc (loc, type, arg0, arg1);
12514 if (INTEGRAL_TYPE_P (type)
12515 && TYPE_MAX_VALUE (type)
12516 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12517 return omit_one_operand_loc (loc, type, arg1, arg0);
12518 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12519 if (tem)
12520 return tem;
12521 goto associate;
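/* Worked example (editorial note, not from the sources): "MIN (x, x)"
   folds to x, "MIN (x, INT_MIN)" folds to INT_MIN, and likewise
   "MAX (x, INT_MAX)" folds to INT_MAX, with x still evaluated for its
   side effects.  */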
12523 case TRUTH_ANDIF_EXPR:
12524 /* Note that the operands of this must be ints
12525 and their values must be 0 or 1.
12526 ("true" is a fixed value perhaps depending on the language.) */
12527 /* If first arg is constant zero, return it. */
12528 if (integer_zerop (arg0))
12529 return fold_convert_loc (loc, type, arg0);
12530 case TRUTH_AND_EXPR:
12531 /* If either arg is constant true, drop it. */
12532 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12533 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12534 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12535 /* Preserve sequence points. */
12536 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12537 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12538 /* If second arg is constant zero, result is zero, but first arg
12539 must be evaluated. */
12540 if (integer_zerop (arg1))
12541 return omit_one_operand_loc (loc, type, arg1, arg0);
12542 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12543 case will be handled here. */
12544 if (integer_zerop (arg0))
12545 return omit_one_operand_loc (loc, type, arg0, arg1);
12547 /* !X && X is always false. */
12548 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12549 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12550 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12551 /* X && !X is always false. */
12552 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12553 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12554 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12556 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12557 means A >= Y && A != MAX, but in this case we know that
12558 A < X <= MAX. */
12560 if (!TREE_SIDE_EFFECTS (arg0)
12561 && !TREE_SIDE_EFFECTS (arg1))
12563 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12564 if (tem && !operand_equal_p (tem, arg0, 0))
12565 return fold_build2_loc (loc, code, type, tem, arg1);
12567 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12568 if (tem && !operand_equal_p (tem, arg1, 0))
12569 return fold_build2_loc (loc, code, type, arg0, tem);
12572 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12573 != NULL_TREE)
12574 return tem;
12576 return NULL_TREE;
12578 case TRUTH_ORIF_EXPR:
12579 /* Note that the operands of this must be ints
12580 and their values must be 0 or true.
12581 ("true" is a fixed value perhaps depending on the language.) */
12582 /* If first arg is constant true, return it. */
12583 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12584 return fold_convert_loc (loc, type, arg0);
12585 case TRUTH_OR_EXPR:
12586 /* If either arg is constant zero, drop it. */
12587 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12588 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12589 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12590 /* Preserve sequence points. */
12591 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12592 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12593 /* If second arg is constant true, result is true, but we must
12594 evaluate first arg. */
12595 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12596 return omit_one_operand_loc (loc, type, arg1, arg0);
12597 /* Likewise for first arg, but note this only occurs here for
12598 TRUTH_OR_EXPR. */
12599 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12600 return omit_one_operand_loc (loc, type, arg0, arg1);
12602 /* !X || X is always true. */
12603 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12604 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12605 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12606 /* X || !X is always true. */
12607 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12608 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12609 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12611 /* (X && !Y) || (!X && Y) is X ^ Y */
12612 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12613 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12615 tree a0, a1, l0, l1, n0, n1;
12617 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12618 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12620 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12621 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12623 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12624 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12626 if ((operand_equal_p (n0, a0, 0)
12627 && operand_equal_p (n1, a1, 0))
12628 || (operand_equal_p (n0, a1, 0)
12629 && operand_equal_p (n1, a0, 0)))
12630 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
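/* Worked example (editorial note, not from the sources): with truth
   operands, "(a && !b) || (!a && b)" folds to "a ^ b", since the
   disjunction is true exactly when a and b differ; both operand
   orders of the two TRUTH_AND_EXPRs are checked.  */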
12633 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12634 != NULL_TREE)
12635 return tem;
12637 return NULL_TREE;
12639 case TRUTH_XOR_EXPR:
12640 /* If the second arg is constant zero, drop it. */
12641 if (integer_zerop (arg1))
12642 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12643 /* If the second arg is constant true, this is a logical inversion. */
12644 if (integer_onep (arg1))
12646 /* Only call invert_truthvalue if operand is a truth value. */
12647 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12648 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12649 else
12650 tem = invert_truthvalue_loc (loc, arg0);
12651 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12653 /* Identical arguments cancel to zero. */
12654 if (operand_equal_p (arg0, arg1, 0))
12655 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12657 /* !X ^ X is always true. */
12658 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12659 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12660 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12662 /* X ^ !X is always true. */
12663 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12664 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12665 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12667 return NULL_TREE;
12669 case EQ_EXPR:
12670 case NE_EXPR:
12671 STRIP_NOPS (arg0);
12672 STRIP_NOPS (arg1);
12674 tem = fold_comparison (loc, code, type, op0, op1);
12675 if (tem != NULL_TREE)
12676 return tem;
12678 /* bool_var != 0 becomes bool_var. */
12679 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12680 && code == NE_EXPR)
12681 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12683 /* bool_var == 1 becomes bool_var. */
12684 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12685 && code == EQ_EXPR)
12686 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12688 /* bool_var != 1 becomes !bool_var. */
12689 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12690 && code == NE_EXPR)
12691 return fold_convert_loc (loc, type,
12692 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12693 TREE_TYPE (arg0), arg0));
12695 /* bool_var == 0 becomes !bool_var. */
12696 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12697 && code == EQ_EXPR)
12698 return fold_convert_loc (loc, type,
12699 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12700 TREE_TYPE (arg0), arg0));
12702 /* !exp != 0 becomes !exp */
12703 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12704 && code == NE_EXPR)
12705 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12707 /* If this is an equality comparison of the address of two non-weak,
12708 unaliased symbols, neither of which is extern (since we do not
12709 have access to attributes for externs), then we know the result. */
12710 if (TREE_CODE (arg0) == ADDR_EXPR
12711 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12712 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12713 && ! lookup_attribute ("alias",
12714 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12715 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12716 && TREE_CODE (arg1) == ADDR_EXPR
12717 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12718 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12719 && ! lookup_attribute ("alias",
12720 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12721 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12723 /* We know that we're looking at the address of two
12724 non-weak, unaliased, static _DECL nodes.
12726 It is both wasteful and incorrect to call operand_equal_p
12727 to compare the two ADDR_EXPR nodes. It is wasteful in that
12728 all we need to do is test pointer equality for the arguments
12729 to the two ADDR_EXPR nodes. It is incorrect to use
12730 operand_equal_p as that function is NOT equivalent to a
12731 C equality test. It can in fact return false for two
12732 objects which would test as equal using the C equality
12733 operator. */
12734 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12735 return constant_boolean_node (equal
12736 ? code == EQ_EXPR : code != EQ_EXPR,
12737 type);
12740 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12741 a MINUS_EXPR of a constant, we can convert it into a comparison with
12742 a revised constant as long as no overflow occurs. */
12743 if (TREE_CODE (arg1) == INTEGER_CST
12744 && (TREE_CODE (arg0) == PLUS_EXPR
12745 || TREE_CODE (arg0) == MINUS_EXPR)
12746 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12747 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12748 ? MINUS_EXPR : PLUS_EXPR,
12749 fold_convert_loc (loc, TREE_TYPE (arg0),
12750 arg1),
12751 TREE_OPERAND (arg0, 1)))
12752 && !TREE_OVERFLOW (tem))
12753 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12755 /* Similarly for a NEGATE_EXPR. */
12756 if (TREE_CODE (arg0) == NEGATE_EXPR
12757 && TREE_CODE (arg1) == INTEGER_CST
12758 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12759 arg1)))
12760 && TREE_CODE (tem) == INTEGER_CST
12761 && !TREE_OVERFLOW (tem))
12762 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12764 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12765 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12766 && TREE_CODE (arg1) == INTEGER_CST
12767 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12768 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12769 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12770 fold_convert_loc (loc,
12771 TREE_TYPE (arg0),
12772 arg1),
12773 TREE_OPERAND (arg0, 1)));
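/* Worked examples (editorial note, not from the sources): these three
   rules migrate the constant across the operation, so "x + 5 == 7"
   becomes "x == 2", "-x == 3" becomes "x == -3", and "(x ^ 3) == 5"
   becomes "x == 6"; the first two additionally require that the
   re-folded constant not overflow.  */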
12775 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12776 if ((TREE_CODE (arg0) == PLUS_EXPR
12777 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12778 || TREE_CODE (arg0) == MINUS_EXPR)
12779 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12780 0)),
12781 arg1, 0)
12782 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12783 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12785 tree val = TREE_OPERAND (arg0, 1);
12786 return omit_two_operands_loc (loc, type,
12787 fold_build2_loc (loc, code, type,
12788 val,
12789 build_int_cst (TREE_TYPE (val),
12790 0)),
12791 TREE_OPERAND (arg0, 0), arg1);
12794 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12795 if (TREE_CODE (arg0) == MINUS_EXPR
12796 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12797 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12798 1)),
12799 arg1, 0)
12800 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12802 return omit_two_operands_loc (loc, type,
12803 code == NE_EXPR
12804 ? boolean_true_node : boolean_false_node,
12805 TREE_OPERAND (arg0, 1), arg1);
12808 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12809 for !=. Don't do this for ordered comparisons due to overflow. */
12810 if (TREE_CODE (arg0) == MINUS_EXPR
12811 && integer_zerop (arg1))
12812 return fold_build2_loc (loc, code, type,
12813 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12815 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12816 if (TREE_CODE (arg0) == ABS_EXPR
12817 && (integer_zerop (arg1) || real_zerop (arg1)))
12818 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12820 /* If this is an EQ or NE comparison with zero and ARG0 is
12821 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12822 two operations, but the latter can be done in one less insn
12823 on machines that have only two-operand insns or on which a
12824 constant cannot be the first operand. */
12825 if (TREE_CODE (arg0) == BIT_AND_EXPR
12826 && integer_zerop (arg1))
12828 tree arg00 = TREE_OPERAND (arg0, 0);
12829 tree arg01 = TREE_OPERAND (arg0, 1);
12830 if (TREE_CODE (arg00) == LSHIFT_EXPR
12831 && integer_onep (TREE_OPERAND (arg00, 0)))
12833 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12834 arg01, TREE_OPERAND (arg00, 1));
12835 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12836 build_int_cst (TREE_TYPE (arg0), 1));
12837 return fold_build2_loc (loc, code, type,
12838 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12839 arg1);
12841 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12842 && integer_onep (TREE_OPERAND (arg01, 0)))
12844 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12845 arg00, TREE_OPERAND (arg01, 1));
12846 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12847 build_int_cst (TREE_TYPE (arg0), 1));
12848 return fold_build2_loc (loc, code, type,
12849 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12850 arg1);
12854 /* If this is an NE or EQ comparison of zero against the result of a
12855 signed MOD operation whose second operand is a power of 2, make
12856 the MOD operation unsigned since it is simpler and equivalent. */
12857 if (integer_zerop (arg1)
12858 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12859 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12860 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12861 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12862 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12863 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12865 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12866 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12867 fold_convert_loc (loc, newtype,
12868 TREE_OPERAND (arg0, 0)),
12869 fold_convert_loc (loc, newtype,
12870 TREE_OPERAND (arg0, 1)));
12872 return fold_build2_loc (loc, code, type, newmod,
12873 fold_convert_loc (loc, newtype, arg1));
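/* Worked example (editorial note, not from the sources): for signed
   int x, "x % 4 == 0" becomes "(unsigned) x % 4U == 0"; the test is
   unchanged because a power of two also divides 2^precision, and the
   unsigned modulus is simpler to expand.  */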
12876 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12877 C1 is a valid shift constant, and C2 is a power of two, i.e.
12878 a single bit. */
12879 if (TREE_CODE (arg0) == BIT_AND_EXPR
12880 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12881 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12882 == INTEGER_CST
12883 && integer_pow2p (TREE_OPERAND (arg0, 1))
12884 && integer_zerop (arg1))
12886 tree itype = TREE_TYPE (arg0);
12887 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12888 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12890 /* Check for a valid shift count. */
12891 if (TREE_INT_CST_HIGH (arg001) == 0
12892 && TREE_INT_CST_LOW (arg001) < prec)
12894 tree arg01 = TREE_OPERAND (arg0, 1);
12895 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12896 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12897 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12898 can be rewritten as (X & (C2 << C1)) != 0. */
12899 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12901 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12902 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12903 return fold_build2_loc (loc, code, type, tem,
12904 fold_convert_loc (loc, itype, arg1));
12906 /* Otherwise, for signed (arithmetic) shifts,
12907 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12908 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12909 else if (!TYPE_UNSIGNED (itype))
12910 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12911 arg000, build_int_cst (itype, 0));
12912 /* Otherwise, for unsigned (logical) shifts,
12913 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12914 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12915 else
12916 return omit_one_operand_loc (loc, type,
12917 code == EQ_EXPR ? integer_one_node
12918 : integer_zero_node,
12919 arg000);
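/* Worked example (editorial note, not from the sources): on a 32-bit
   type, "((x >> 3) & 4) != 0" becomes "(x & 32) != 0", since
   4 << 3 == 32 stays within the precision; when the shifted constant
   would overflow, the test collapses to a sign check for signed x or
   to a constant for unsigned x.  */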
12923 /* If we have (A & C) == C where C is a power of 2, convert this into
12924 (A & C) != 0. Similarly for NE_EXPR. */
12925 if (TREE_CODE (arg0) == BIT_AND_EXPR
12926 && integer_pow2p (TREE_OPERAND (arg0, 1))
12927 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12928 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12929 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12930 integer_zero_node));
12932 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12933 bit, then fold the expression into A < 0 or A >= 0. */
12934 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12935 if (tem)
12936 return tem;
12938 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12939 Similarly for NE_EXPR. */
12940 if (TREE_CODE (arg0) == BIT_AND_EXPR
12941 && TREE_CODE (arg1) == INTEGER_CST
12942 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12944 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12945 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12946 TREE_OPERAND (arg0, 1));
12947 tree dandnotc
12948 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12949 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12950 notc);
12951 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12952 if (integer_nonzerop (dandnotc))
12953 return omit_one_operand_loc (loc, type, rslt, arg0);
12956 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12957 Similarly for NE_EXPR. */
12958 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12959 && TREE_CODE (arg1) == INTEGER_CST
12960 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12962 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12963 tree candnotd
12964 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12965 TREE_OPERAND (arg0, 1),
12966 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12967 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12968 if (integer_nonzerop (candnotd))
12969 return omit_one_operand_loc (loc, type, rslt, arg0);
12972 /* If this is a comparison of a field, we may be able to simplify it. */
12973 if ((TREE_CODE (arg0) == COMPONENT_REF
12974 || TREE_CODE (arg0) == BIT_FIELD_REF)
12975 /* Handle the constant case even without -O
12976 to make sure the warnings are given. */
12977 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12979 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12980 if (t1)
12981 return t1;
12984 /* Optimize comparisons of strlen vs zero to a compare of the
12985 first character of the string vs zero. To wit,
12986 strlen(ptr) == 0 => *ptr == 0
12987 strlen(ptr) != 0 => *ptr != 0
12988 Other cases should reduce to one of these two (or a constant)
12989 due to the return value of strlen being unsigned. */
12990 if (TREE_CODE (arg0) == CALL_EXPR
12991 && integer_zerop (arg1))
12993 tree fndecl = get_callee_fndecl (arg0);
12995 if (fndecl
12996 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12997 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12998 && call_expr_nargs (arg0) == 1
12999 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13001 tree iref = build_fold_indirect_ref_loc (loc,
13002 CALL_EXPR_ARG (arg0, 0));
13003 return fold_build2_loc (loc, code, type, iref,
13004 build_int_cst (TREE_TYPE (iref), 0));
13008 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13009 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13010 if (TREE_CODE (arg0) == RSHIFT_EXPR
13011 && integer_zerop (arg1)
13012 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13014 tree arg00 = TREE_OPERAND (arg0, 0);
13015 tree arg01 = TREE_OPERAND (arg0, 1);
13016 tree itype = TREE_TYPE (arg00);
13017 if (TREE_INT_CST_HIGH (arg01) == 0
13018 && TREE_INT_CST_LOW (arg01)
13019 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13021 if (TYPE_UNSIGNED (itype))
13023 itype = signed_type_for (itype);
13024 arg00 = fold_convert_loc (loc, itype, arg00);
13026 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13027 type, arg00, build_zero_cst (itype));
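/* Worked example (editorial note, not from the sources): for 32-bit
   int x, "(x >> 31) != 0" becomes "x < 0" and "(x >> 31) == 0"
   becomes "x >= 0"; an unsigned x is first converted to the
   corresponding signed type.  */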
13031 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13032 if (integer_zerop (arg1)
13033 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13034 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13035 TREE_OPERAND (arg0, 1));
13037 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13038 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13039 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13040 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13041 build_zero_cst (TREE_TYPE (arg0)));
13042 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13043 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13044 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13045 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13046 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13047 build_zero_cst (TREE_TYPE (arg0)));
13049 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13050 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13051 && TREE_CODE (arg1) == INTEGER_CST
13052 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13053 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13054 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13055 TREE_OPERAND (arg0, 1), arg1));
13057 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13058 (X & C) == 0 when C is a single bit. */
13059 if (TREE_CODE (arg0) == BIT_AND_EXPR
13060 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13061 && integer_zerop (arg1)
13062 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13064 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13065 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13066 TREE_OPERAND (arg0, 1));
13067 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13068 type, tem,
13069 fold_convert_loc (loc, TREE_TYPE (arg0),
13070 arg1));
13073 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13074 constant C is a power of two, i.e. a single bit. */
13075 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13076 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13077 && integer_zerop (arg1)
13078 && integer_pow2p (TREE_OPERAND (arg0, 1))
13079 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13080 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13082 tree arg00 = TREE_OPERAND (arg0, 0);
13083 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13084 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13087 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13088 when C is a power of two, i.e. a single bit. */
13089 if (TREE_CODE (arg0) == BIT_AND_EXPR
13090 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13091 && integer_zerop (arg1)
13092 && integer_pow2p (TREE_OPERAND (arg0, 1))
13093 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13094 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13096 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13097 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13098 arg000, TREE_OPERAND (arg0, 1));
13099 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13100 tem, build_int_cst (TREE_TYPE (tem), 0));
13103 if (integer_zerop (arg1)
13104 && tree_expr_nonzero_p (arg0))
13106 tree res = constant_boolean_node (code == NE_EXPR, type);
13107 return omit_one_operand_loc (loc, type, res, arg0);
13110 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13111 if (TREE_CODE (arg0) == NEGATE_EXPR
13112 && TREE_CODE (arg1) == NEGATE_EXPR)
13113 return fold_build2_loc (loc, code, type,
13114 TREE_OPERAND (arg0, 0),
13115 fold_convert_loc (loc, TREE_TYPE (arg0),
13116 TREE_OPERAND (arg1, 0)));
13118 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13119 if (TREE_CODE (arg0) == BIT_AND_EXPR
13120 && TREE_CODE (arg1) == BIT_AND_EXPR)
13122 tree arg00 = TREE_OPERAND (arg0, 0);
13123 tree arg01 = TREE_OPERAND (arg0, 1);
13124 tree arg10 = TREE_OPERAND (arg1, 0);
13125 tree arg11 = TREE_OPERAND (arg1, 1);
13126 tree itype = TREE_TYPE (arg0);
13128 if (operand_equal_p (arg01, arg11, 0))
13129 return fold_build2_loc (loc, code, type,
13130 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13131 fold_build2_loc (loc,
13132 BIT_XOR_EXPR, itype,
13133 arg00, arg10),
13134 arg01),
13135 build_zero_cst (itype));
13137 if (operand_equal_p (arg01, arg10, 0))
13138 return fold_build2_loc (loc, code, type,
13139 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13140 fold_build2_loc (loc,
13141 BIT_XOR_EXPR, itype,
13142 arg00, arg11),
13143 arg01),
13144 build_zero_cst (itype));
13146 if (operand_equal_p (arg00, arg11, 0))
13147 return fold_build2_loc (loc, code, type,
13148 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13149 fold_build2_loc (loc,
13150 BIT_XOR_EXPR, itype,
13151 arg01, arg10),
13152 arg00),
13153 build_zero_cst (itype));
13155 if (operand_equal_p (arg00, arg10, 0))
13156 return fold_build2_loc (loc, code, type,
13157 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13158 fold_build2_loc (loc,
13159 BIT_XOR_EXPR, itype,
13160 arg01, arg11),
13161 arg00),
13162 build_zero_cst (itype));
13165 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13166 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13168 tree arg00 = TREE_OPERAND (arg0, 0);
13169 tree arg01 = TREE_OPERAND (arg0, 1);
13170 tree arg10 = TREE_OPERAND (arg1, 0);
13171 tree arg11 = TREE_OPERAND (arg1, 1);
13172 tree itype = TREE_TYPE (arg0);
13174 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13175 operand_equal_p guarantees no side-effects so we don't need
13176 to use omit_one_operand on Z. */
13177 if (operand_equal_p (arg01, arg11, 0))
13178 return fold_build2_loc (loc, code, type, arg00,
13179 fold_convert_loc (loc, TREE_TYPE (arg00),
13180 arg10));
13181 if (operand_equal_p (arg01, arg10, 0))
13182 return fold_build2_loc (loc, code, type, arg00,
13183 fold_convert_loc (loc, TREE_TYPE (arg00),
13184 arg11));
13185 if (operand_equal_p (arg00, arg11, 0))
13186 return fold_build2_loc (loc, code, type, arg01,
13187 fold_convert_loc (loc, TREE_TYPE (arg01),
13188 arg10));
13189 if (operand_equal_p (arg00, arg10, 0))
13190 return fold_build2_loc (loc, code, type, arg01,
13191 fold_convert_loc (loc, TREE_TYPE (arg01),
13192 arg11));
13194 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13195 if (TREE_CODE (arg01) == INTEGER_CST
13196 && TREE_CODE (arg11) == INTEGER_CST)
13198 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13199 fold_convert_loc (loc, itype, arg11));
13200 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13201 return fold_build2_loc (loc, code, type, tem,
13202 fold_convert_loc (loc, itype, arg10));
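/* Worked examples (editorial note, not from the sources):
   "(x ^ z) == (y ^ z)" becomes "x == y" by cancelling the common
   term, and "(x ^ 3) == (y ^ 5)" becomes "(x ^ 6) == y" by merging
   the constants, since 3 ^ 5 == 6.  */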
13206 /* Attempt to simplify equality/inequality comparisons of complex
13207 values. Only lower the comparison if the result is known or
13208 can be simplified to a single scalar comparison. */
13209 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13210 || TREE_CODE (arg0) == COMPLEX_CST)
13211 && (TREE_CODE (arg1) == COMPLEX_EXPR
13212 || TREE_CODE (arg1) == COMPLEX_CST))
13214 tree real0, imag0, real1, imag1;
13215 tree rcond, icond;
13217 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13219 real0 = TREE_OPERAND (arg0, 0);
13220 imag0 = TREE_OPERAND (arg0, 1);
13222 else
13224 real0 = TREE_REALPART (arg0);
13225 imag0 = TREE_IMAGPART (arg0);
13228 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13230 real1 = TREE_OPERAND (arg1, 0);
13231 imag1 = TREE_OPERAND (arg1, 1);
13233 else
13235 real1 = TREE_REALPART (arg1);
13236 imag1 = TREE_IMAGPART (arg1);
13239 rcond = fold_binary_loc (loc, code, type, real0, real1);
13240 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13242 if (integer_zerop (rcond))
13244 if (code == EQ_EXPR)
13245 return omit_two_operands_loc (loc, type, boolean_false_node,
13246 imag0, imag1);
13247 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13249 else
13251 if (code == NE_EXPR)
13252 return omit_two_operands_loc (loc, type, boolean_true_node,
13253 imag0, imag1);
13254 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13258 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13259 if (icond && TREE_CODE (icond) == INTEGER_CST)
13261 if (integer_zerop (icond))
13263 if (code == EQ_EXPR)
13264 return omit_two_operands_loc (loc, type, boolean_false_node,
13265 real0, real1);
13266 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13268 else
13270 if (code == NE_EXPR)
13271 return omit_two_operands_loc (loc, type, boolean_true_node,
13272 real0, real1);
13273 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13278 return NULL_TREE;
13280 case LT_EXPR:
13281 case GT_EXPR:
13282 case LE_EXPR:
13283 case GE_EXPR:
13284 tem = fold_comparison (loc, code, type, op0, op1);
13285 if (tem != NULL_TREE)
13286 return tem;
13288 /* Transform comparisons of the form X +- C CMP X. */
13289 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13290 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13291 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13292 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13293 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13294 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13296 tree arg01 = TREE_OPERAND (arg0, 1);
13297 enum tree_code code0 = TREE_CODE (arg0);
13298 int is_positive;
13300 if (TREE_CODE (arg01) == REAL_CST)
13301 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13302 else
13303 is_positive = tree_int_cst_sgn (arg01);
13305 /* (X - c) > X becomes false. */
13306 if (code == GT_EXPR
13307 && ((code0 == MINUS_EXPR && is_positive >= 0)
13308 || (code0 == PLUS_EXPR && is_positive <= 0)))
13310 if (TREE_CODE (arg01) == INTEGER_CST
13311 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13312 fold_overflow_warning (("assuming signed overflow does not "
13313 "occur when assuming that (X - c) > X "
13314 "is always false"),
13315 WARN_STRICT_OVERFLOW_ALL);
13316 return constant_boolean_node (0, type);
13319 /* Likewise (X + c) < X becomes false. */
13320 if (code == LT_EXPR
13321 && ((code0 == PLUS_EXPR && is_positive >= 0)
13322 || (code0 == MINUS_EXPR && is_positive <= 0)))
13324 if (TREE_CODE (arg01) == INTEGER_CST
13325 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13326 fold_overflow_warning (("assuming signed overflow does not "
13327 "occur when assuming that "
13328 "(X + c) < X is always false"),
13329 WARN_STRICT_OVERFLOW_ALL);
13330 return constant_boolean_node (0, type);
13333 /* Convert (X - c) <= X to true. */
13334 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13335 && code == LE_EXPR
13336 && ((code0 == MINUS_EXPR && is_positive >= 0)
13337 || (code0 == PLUS_EXPR && is_positive <= 0)))
13339 if (TREE_CODE (arg01) == INTEGER_CST
13340 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13341 fold_overflow_warning (("assuming signed overflow does not "
13342 "occur when assuming that "
13343 "(X - c) <= X is always true"),
13344 WARN_STRICT_OVERFLOW_ALL);
13345 return constant_boolean_node (1, type);
13348 /* Convert (X + c) >= X to true. */
13349 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13350 && code == GE_EXPR
13351 && ((code0 == PLUS_EXPR && is_positive >= 0)
13352 || (code0 == MINUS_EXPR && is_positive <= 0)))
13354 if (TREE_CODE (arg01) == INTEGER_CST
13355 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13356 fold_overflow_warning (("assuming signed overflow does not "
13357 "occur when assuming that "
13358 "(X + c) >= X is always true"),
13359 WARN_STRICT_OVERFLOW_ALL);
13360 return constant_boolean_node (1, type);
13363 if (TREE_CODE (arg01) == INTEGER_CST)
13365 /* Convert X + c > X and X - c < X to true for integers. */
13366 if (code == GT_EXPR
13367 && ((code0 == PLUS_EXPR && is_positive > 0)
13368 || (code0 == MINUS_EXPR && is_positive < 0)))
13370 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13371 fold_overflow_warning (("assuming signed overflow does "
13372 "not occur when assuming that "
13373 "(X + c) > X is always true"),
13374 WARN_STRICT_OVERFLOW_ALL);
13375 return constant_boolean_node (1, type);
13378 if (code == LT_EXPR
13379 && ((code0 == MINUS_EXPR && is_positive > 0)
13380 || (code0 == PLUS_EXPR && is_positive < 0)))
13382 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13383 fold_overflow_warning (("assuming signed overflow does "
13384 "not occur when assuming that "
13385 "(X - c) < X is always true"),
13386 WARN_STRICT_OVERFLOW_ALL);
13387 return constant_boolean_node (1, type);
13390 /* Convert X + c <= X and X - c >= X to false for integers. */
13391 if (code == LE_EXPR
13392 && ((code0 == PLUS_EXPR && is_positive > 0)
13393 || (code0 == MINUS_EXPR && is_positive < 0)))
13395 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13396 fold_overflow_warning (("assuming signed overflow does "
13397 "not occur when assuming that "
13398 "(X + c) <= X is always false"),
13399 WARN_STRICT_OVERFLOW_ALL);
13400 return constant_boolean_node (0, type);
13403 if (code == GE_EXPR
13404 && ((code0 == MINUS_EXPR && is_positive > 0)
13405 || (code0 == PLUS_EXPR && is_positive < 0)))
13407 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13408 fold_overflow_warning (("assuming signed overflow does "
13409 "not occur when assuming that "
13410 "(X - c) >= X is always false"),
13411 WARN_STRICT_OVERFLOW_ALL);
13412 return constant_boolean_node (0, type);
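/* Worked example (editorial note, not from the sources): when signed
   overflow is treated as undefined, "x + 1 > x" folds to true and
   "x - 1 >= x" folds to false for signed int x; each such fold goes
   through fold_overflow_warning so -Wstrict-overflow can report
   it.  */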
13417 /* Comparisons with the highest or lowest possible integer of
13418 the specified precision will have known values. */
13420 tree arg1_type = TREE_TYPE (arg1);
13421 unsigned int width = TYPE_PRECISION (arg1_type);
13423 if (TREE_CODE (arg1) == INTEGER_CST
13424 && width <= HOST_BITS_PER_DOUBLE_INT
13425 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13427 HOST_WIDE_INT signed_max_hi;
13428 unsigned HOST_WIDE_INT signed_max_lo;
13429 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13431 if (width <= HOST_BITS_PER_WIDE_INT)
13433 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13434 - 1;
13435 signed_max_hi = 0;
13436 max_hi = 0;
13438 if (TYPE_UNSIGNED (arg1_type))
13440 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13441 min_lo = 0;
13442 min_hi = 0;
13444 else
13446 max_lo = signed_max_lo;
13447 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13448 min_hi = -1;
13451 else
13453 width -= HOST_BITS_PER_WIDE_INT;
13454 signed_max_lo = -1;
13455 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13456 - 1;
13457 max_lo = -1;
13458 min_lo = 0;
13460 if (TYPE_UNSIGNED (arg1_type))
13462 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13463 min_hi = 0;
13465 else
13467 max_hi = signed_max_hi;
13468 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13472 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13473 && TREE_INT_CST_LOW (arg1) == max_lo)
13474 switch (code)
13476 case GT_EXPR:
13477 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13479 case GE_EXPR:
13480 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13482 case LE_EXPR:
13483 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13485 case LT_EXPR:
13486 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13488 /* The GE_EXPR and LT_EXPR cases above are not normally
13489 reached because of previous transformations. */
13491 default:
13492 break;
13494 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13495 == max_hi
13496 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13497 switch (code)
13499 case GT_EXPR:
13500 arg1 = const_binop (PLUS_EXPR, arg1,
13501 build_int_cst (TREE_TYPE (arg1), 1));
13502 return fold_build2_loc (loc, EQ_EXPR, type,
13503 fold_convert_loc (loc,
13504 TREE_TYPE (arg1), arg0),
13505 arg1);
13506 case LE_EXPR:
13507 arg1 = const_binop (PLUS_EXPR, arg1,
13508 build_int_cst (TREE_TYPE (arg1), 1));
13509 return fold_build2_loc (loc, NE_EXPR, type,
13510 fold_convert_loc (loc, TREE_TYPE (arg1),
13511 arg0),
13512 arg1);
13513 default:
13514 break;
13516 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13517 == min_hi
13518 && TREE_INT_CST_LOW (arg1) == min_lo)
13519 switch (code)
13521 case LT_EXPR:
13522 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13524 case LE_EXPR:
13525 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13527 case GE_EXPR:
13528 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13530 case GT_EXPR:
13531 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13533 default:
13534 break;
13536 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13537 == min_hi
13538 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13539 switch (code)
13541 case GE_EXPR:
13542 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13543 return fold_build2_loc (loc, NE_EXPR, type,
13544 fold_convert_loc (loc,
13545 TREE_TYPE (arg1), arg0),
13546 arg1);
13547 case LT_EXPR:
13548 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13549 return fold_build2_loc (loc, EQ_EXPR, type,
13550 fold_convert_loc (loc, TREE_TYPE (arg1),
13551 arg0),
13552 arg1);
13553 default:
13554 break;
13557 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13558 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13559 && TYPE_UNSIGNED (arg1_type)
13560 /* We will flip the signedness of the comparison operator
13561 associated with the mode of arg1, so the sign bit is
13562 specified by this mode. Check that arg1 is the signed
13563 max associated with this sign bit. */
13564 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13565 /* signed_type does not work on pointer types. */
13566 && INTEGRAL_TYPE_P (arg1_type))
13568 /* The following case also applies to X < signed_max+1
13569 and X >= signed_max+1 because of previous transformations. */
13570 if (code == LE_EXPR || code == GT_EXPR)
13572 tree st;
13573 st = signed_type_for (TREE_TYPE (arg1));
13574 return fold_build2_loc (loc,
13575 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13576 type, fold_convert_loc (loc, st, arg0),
13577 build_int_cst (st, 0));
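/* Worked examples (editorial note, not from the sources): for
   unsigned char x, "x > 255" folds to false, "x <= 255" to true,
   "x > 254" to "x == 255", and "x <= 254" to "x != 255"; the final
   clause turns "x > 127" into "(signed char) x < 0", flipping the
   comparison onto the sign bit.  */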
13583 /* If we are comparing an ABS_EXPR with a constant, we can
13584 convert all the cases into explicit comparisons, but they may
13585 well not be faster than doing the ABS and one comparison.
13586 But ABS (X) <= C is a range comparison, which becomes a subtraction
13587 and a comparison, and is probably faster. */
13588 if (code == LE_EXPR
13589 && TREE_CODE (arg1) == INTEGER_CST
13590 && TREE_CODE (arg0) == ABS_EXPR
13591 && ! TREE_SIDE_EFFECTS (arg0)
13592 && (0 != (tem = negate_expr (arg1)))
13593 && TREE_CODE (tem) == INTEGER_CST
13594 && !TREE_OVERFLOW (tem))
13595 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13596 build2 (GE_EXPR, type,
13597 TREE_OPERAND (arg0, 0), tem),
13598 build2 (LE_EXPR, type,
13599 TREE_OPERAND (arg0, 0), arg1));
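/* Worked example (editorial note, not from the sources):
   "ABS_EXPR <x> <= 5" becomes "x >= -5 && x <= 5", a range test that
   typically expands to one subtraction and one unsigned
   comparison.  */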
13601 /* Convert ABS_EXPR<x> >= 0 to true. */
13602 strict_overflow_p = false;
13603 if (code == GE_EXPR
13604 && (integer_zerop (arg1)
13605 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13606 && real_zerop (arg1)))
13607 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13609 if (strict_overflow_p)
13610 fold_overflow_warning (("assuming signed overflow does not occur "
13611 "when simplifying comparison of "
13612 "absolute value and zero"),
13613 WARN_STRICT_OVERFLOW_CONDITIONAL);
13614 return omit_one_operand_loc (loc, type,
13615 constant_boolean_node (true, type),
13616 arg0);
13619 /* Convert ABS_EXPR<x> < 0 to false. */
13620 strict_overflow_p = false;
13621 if (code == LT_EXPR
13622 && (integer_zerop (arg1) || real_zerop (arg1))
13623 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13625 if (strict_overflow_p)
13626 fold_overflow_warning (("assuming signed overflow does not occur "
13627 "when simplifying comparison of "
13628 "absolute value and zero"),
13629 WARN_STRICT_OVERFLOW_CONDITIONAL);
13630 return omit_one_operand_loc (loc, type,
13631 constant_boolean_node (false, type),
13632 arg0);
13635 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13636 and similarly for >= into !=. */
13637 if ((code == LT_EXPR || code == GE_EXPR)
13638 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13639 && TREE_CODE (arg1) == LSHIFT_EXPR
13640 && integer_onep (TREE_OPERAND (arg1, 0)))
13641 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13642 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13643 TREE_OPERAND (arg1, 1)),
13644 build_zero_cst (TREE_TYPE (arg0)));
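/* Worked example (editorial note, not from the sources): for unsigned
   x, "x < (1 << y)" becomes "(x >> y) == 0" and "x >= (1 << y)"
   becomes "(x >> y) != 0".  */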
13646 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13647 otherwise Y might be >= # of bits in X's type and thus e.g.
13648 (unsigned char) (1 << Y) for Y == 15 might be 0.
13649 If the cast is widening, then 1 << Y should have unsigned type,
13650 otherwise if Y is the number of bits in the signed shift type minus 1,
13651 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13652 Y == 31 might be 0xffffffff80000000. */
13653 if ((code == LT_EXPR || code == GE_EXPR)
13654 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13655 && CONVERT_EXPR_P (arg1)
13656 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13657 && (TYPE_PRECISION (TREE_TYPE (arg1))
13658 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13659 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13660 || (TYPE_PRECISION (TREE_TYPE (arg1))
13661 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13662 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13664 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13665 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13666 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13667 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13668 build_zero_cst (TREE_TYPE (arg0)));
13671 return NULL_TREE;
13673 case UNORDERED_EXPR:
13674 case ORDERED_EXPR:
13675 case UNLT_EXPR:
13676 case UNLE_EXPR:
13677 case UNGT_EXPR:
13678 case UNGE_EXPR:
13679 case UNEQ_EXPR:
13680 case LTGT_EXPR:
13681 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13683 t1 = fold_relational_const (code, type, arg0, arg1);
13684 if (t1 != NULL_TREE)
13685 return t1;
13688 /* If the first operand is NaN, the result is constant. */
13689 if (TREE_CODE (arg0) == REAL_CST
13690 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13691 && (code != LTGT_EXPR || ! flag_trapping_math))
13693 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13694 ? integer_zero_node
13695 : integer_one_node;
13696 return omit_one_operand_loc (loc, type, t1, arg1);
13699 /* If the second operand is NaN, the result is constant. */
13700 if (TREE_CODE (arg1) == REAL_CST
13701 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13702 && (code != LTGT_EXPR || ! flag_trapping_math))
13704 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13705 ? integer_zero_node
13706 : integer_one_node;
13707 return omit_one_operand_loc (loc, type, t1, arg0);
13710 /* Simplify unordered comparison of something with itself. */
13711 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13712 && operand_equal_p (arg0, arg1, 0))
13713 return constant_boolean_node (1, type);
13715 if (code == LTGT_EXPR
13716 && !flag_trapping_math
13717 && operand_equal_p (arg0, arg1, 0))
13718 return constant_boolean_node (0, type);
13720 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13722 tree targ0 = strip_float_extensions (arg0);
13723 tree targ1 = strip_float_extensions (arg1);
13724 tree newtype = TREE_TYPE (targ0);
13726 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13727 newtype = TREE_TYPE (targ1);
13729 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13730 return fold_build2_loc (loc, code, type,
13731 fold_convert_loc (loc, newtype, targ0),
13732 fold_convert_loc (loc, newtype, targ1));
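/* Worked example (editorial note, not from the sources): for floats f
   and g, an unordered comparison of "(double) f" with "(double) g"
   folds to the same comparison of f and g directly, since widening
   both operands cannot change the outcome.  */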
13735 return NULL_TREE;
13737 case COMPOUND_EXPR:
13738 /* When pedantic, a compound expression can be neither an lvalue
13739 nor an integer constant expression. */
13740 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13741 return NULL_TREE;
13742 /* Don't let (0, 0) be a null pointer constant. */
13743 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13744 : fold_convert_loc (loc, type, arg1);
13745 return pedantic_non_lvalue_loc (loc, tem);
13747 case COMPLEX_EXPR:
13748 if ((TREE_CODE (arg0) == REAL_CST
13749 && TREE_CODE (arg1) == REAL_CST)
13750 || (TREE_CODE (arg0) == INTEGER_CST
13751 && TREE_CODE (arg1) == INTEGER_CST))
13752 return build_complex (type, arg0, arg1);
13753 if (TREE_CODE (arg0) == REALPART_EXPR
13754 && TREE_CODE (arg1) == IMAGPART_EXPR
13755 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13756 && operand_equal_p (TREE_OPERAND (arg0, 0),
13757 TREE_OPERAND (arg1, 0), 0))
13758 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13759 TREE_OPERAND (arg1, 0));
13760 return NULL_TREE;
13762 case ASSERT_EXPR:
13763 /* An ASSERT_EXPR should never be passed to fold_binary. */
13764 gcc_unreachable ();
13766 case VEC_PACK_TRUNC_EXPR:
13767 case VEC_PACK_FIX_TRUNC_EXPR:
13769 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13770 tree *elts;
13772 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13773 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13774 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13775 return NULL_TREE;
13777 elts = XALLOCAVEC (tree, nelts);
13778 if (!vec_cst_ctor_to_array (arg0, elts)
13779 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13780 return NULL_TREE;
13782 for (i = 0; i < nelts; i++)
13784 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13785 ? NOP_EXPR : FIX_TRUNC_EXPR,
13786 TREE_TYPE (type), elts[i]);
13787 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13788 return NULL_TREE;
13791 return build_vector (type, elts);
13794 case VEC_WIDEN_MULT_LO_EXPR:
13795 case VEC_WIDEN_MULT_HI_EXPR:
13796 case VEC_WIDEN_MULT_EVEN_EXPR:
13797 case VEC_WIDEN_MULT_ODD_EXPR:
13799 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13800 unsigned int out, ofs, scale;
13801 tree *elts;
13803 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13804 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13805 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13806 return NULL_TREE;
13808 elts = XALLOCAVEC (tree, nelts * 4);
13809 if (!vec_cst_ctor_to_array (arg0, elts)
13810 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13811 return NULL_TREE;
13813 if (code == VEC_WIDEN_MULT_LO_EXPR)
13814 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13815 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13816 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13817 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13818 scale = 1, ofs = 0;
13819 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13820 scale = 1, ofs = 1;
13822 for (out = 0; out < nelts; out++)
13824 unsigned int in1 = (out << scale) + ofs;
13825 unsigned int in2 = in1 + nelts * 2;
13826 tree t1, t2;
13828 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13829 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13831 if (t1 == NULL_TREE || t2 == NULL_TREE)
13832 return NULL_TREE;
13833 elts[out] = const_binop (MULT_EXPR, t1, t2);
13834 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13835 return NULL_TREE;
13838 return build_vector (type, elts);
13841 default:
13842 return NULL_TREE;
13843 } /* switch (code) */
13846 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13847 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13848 of GOTO_EXPR. */
13850 static tree
13851 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13853 switch (TREE_CODE (*tp))
13855 case LABEL_EXPR:
13856 return *tp;
13858 case GOTO_EXPR:
13859 *walk_subtrees = 0;
13861 /* ... fall through ... */
13863 default:
13864 return NULL_TREE;
13868 /* Return whether the sub-tree ST contains a label which is accessible from
13869 outside the sub-tree. */
13871 static bool
13872 contains_label_p (tree st)
13874 return
13875 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
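/* This is used by the COND_EXPR folding below: an arm that contains a
label must be preserved even when the condition is a known constant,
since a goto elsewhere in the function may still jump to it. */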
13878 /* Fold a ternary expression of code CODE and type TYPE with operands
13879 OP0, OP1, and OP2. Return the folded expression if folding is
13880 successful. Otherwise, return NULL_TREE. */
13882 tree
13883 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13884 tree op0, tree op1, tree op2)
13886 tree tem;
13887 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13888 enum tree_code_class kind = TREE_CODE_CLASS (code);
13890 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13891 && TREE_CODE_LENGTH (code) == 3);
13893 /* Strip any conversions that don't change the mode. This is safe
13894 for every expression, except for a comparison expression because
13895 its signedness is derived from its operands. So, in the latter
13896 case, only strip conversions that don't change the signedness.
13898 Note that this is done as an internal manipulation within the
13899 constant folder, in order to find the simplest representation of
13900 the arguments so that their form can be studied. In any case,
13901 the appropriate type conversions should be put back in the tree
13902 that will get out of the constant folder. */
13903 if (op0)
13905 arg0 = op0;
13906 STRIP_NOPS (arg0);
13909 if (op1)
13911 arg1 = op1;
13912 STRIP_NOPS (arg1);
13915 if (op2)
13917 arg2 = op2;
13918 STRIP_NOPS (arg2);
13921 switch (code)
13923 case COMPONENT_REF:
13924 if (TREE_CODE (arg0) == CONSTRUCTOR
13925 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13927 unsigned HOST_WIDE_INT idx;
13928 tree field, value;
13929 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13930 if (field == arg1)
13931 return value;
13933 return NULL_TREE;
13935 case COND_EXPR:
13936 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13937 so all simple results must be passed through pedantic_non_lvalue. */
13938 if (TREE_CODE (arg0) == INTEGER_CST)
13940 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13941 tem = integer_zerop (arg0) ? op2 : op1;
13942 /* Only optimize constant conditions when the selected branch
13943 has the same type as the COND_EXPR. This avoids optimizing
13944 away "c ? x : throw", where the throw has a void type.
13945 Avoid throwing away the operand that contains a label. */
13946 if ((!TREE_SIDE_EFFECTS (unused_op)
13947 || !contains_label_p (unused_op))
13948 && (! VOID_TYPE_P (TREE_TYPE (tem))
13949 || VOID_TYPE_P (type)))
13950 return pedantic_non_lvalue_loc (loc, tem);
13951 return NULL_TREE;
13953 if (operand_equal_p (arg1, op2, 0))
13954 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13956 /* If we have A op B ? A : C, we may be able to convert this to a
13957 simpler expression, depending on the operation and the values
13958 of B and C. Signed zeros prevent all of these transformations,
13959 for reasons given above each one.
13961 Also try swapping the arguments and inverting the conditional. */
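/* For instance, `x < y ? x : y' can become MIN_EXPR <x, y> and
`x >= 0 ? x : -x' can become ABS_EXPR <x>; with signed zeros,
MIN (0.0, -0.0) may return either zero, so the transformation would
not be exact. */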
13962 if (COMPARISON_CLASS_P (arg0)
13963 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13964 arg1, TREE_OPERAND (arg0, 1))
13965 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13967 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13968 if (tem)
13969 return tem;
13972 if (COMPARISON_CLASS_P (arg0)
13973 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13974 op2,
13975 TREE_OPERAND (arg0, 1))
13976 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13978 location_t loc0 = expr_location_or (arg0, loc);
13979 tem = fold_truth_not_expr (loc0, arg0);
13980 if (tem && COMPARISON_CLASS_P (tem))
13982 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13983 if (tem)
13984 return tem;
13988 /* If the second operand is simpler than the third, swap them
13989 since that produces better jump optimization results. */
13990 if (truth_value_p (TREE_CODE (arg0))
13991 && tree_swap_operands_p (op1, op2, false))
13993 location_t loc0 = expr_location_or (arg0, loc);
13994 /* See if this can be inverted. If it can't, possibly because
13995 it was a floating-point inequality comparison, don't do
13996 anything. */
13997 tem = fold_truth_not_expr (loc0, arg0);
13998 if (tem)
13999 return fold_build3_loc (loc, code, type, tem, op2, op1);
14002 /* Convert A ? 1 : 0 to simply A. */
14003 if (integer_onep (op1)
14004 && integer_zerop (op2)
14005 /* If we try to convert OP0 to our type, the
14006 call to fold will try to move the conversion inside
14007 a COND, which will recurse. In that case, the COND_EXPR
14008 is probably the best choice, so leave it alone. */
14009 && type == TREE_TYPE (arg0))
14010 return pedantic_non_lvalue_loc (loc, arg0);
14012 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14013 over COND_EXPR in cases such as floating point comparisons. */
14014 if (integer_zerop (op1)
14015 && integer_onep (op2)
14016 && truth_value_p (TREE_CODE (arg0)))
14017 return pedantic_non_lvalue_loc (loc,
14018 fold_convert_loc (loc, type,
14019 invert_truthvalue_loc (loc,
14020 arg0)));
14022 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
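/* E.g. for a signed 32-bit A, `A < 0 ? 0x80000000u : 0' tests exactly
the sign bit and is therefore equivalent to `A & 0x80000000u'. */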
14023 if (TREE_CODE (arg0) == LT_EXPR
14024 && integer_zerop (TREE_OPERAND (arg0, 1))
14025 && integer_zerop (op2)
14026 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14028 /* sign_bit_p only checks ARG1 bits within A's precision.
14029 If <sign bit of A> has wider type than A, bits outside
14030 of A's precision in <sign bit of A> need to be checked.
14031 If they are all 0, this optimization needs to be done
14032 in unsigned A's type; if they are all 1, in signed A's type;
14033 otherwise this can't be done. */
14034 if (TYPE_PRECISION (TREE_TYPE (tem))
14035 < TYPE_PRECISION (TREE_TYPE (arg1))
14036 && TYPE_PRECISION (TREE_TYPE (tem))
14037 < TYPE_PRECISION (type))
14039 unsigned HOST_WIDE_INT mask_lo;
14040 HOST_WIDE_INT mask_hi;
14041 int inner_width, outer_width;
14042 tree tem_type;
14044 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14045 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14046 if (outer_width > TYPE_PRECISION (type))
14047 outer_width = TYPE_PRECISION (type);
14049 if (outer_width > HOST_BITS_PER_WIDE_INT)
14051 mask_hi = ((unsigned HOST_WIDE_INT) -1
14052 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14053 mask_lo = -1;
14055 else
14057 mask_hi = 0;
14058 mask_lo = ((unsigned HOST_WIDE_INT) -1
14059 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14061 if (inner_width > HOST_BITS_PER_WIDE_INT)
14063 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
14064 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14065 mask_lo = 0;
14067 else
14068 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
14069 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14071 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14072 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14074 tem_type = signed_type_for (TREE_TYPE (tem));
14075 tem = fold_convert_loc (loc, tem_type, tem);
14077 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14078 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14080 tem_type = unsigned_type_for (TREE_TYPE (tem));
14081 tem = fold_convert_loc (loc, tem_type, tem);
14083 else
14084 tem = NULL;
14087 if (tem)
14088 return
14089 fold_convert_loc (loc, type,
14090 fold_build2_loc (loc, BIT_AND_EXPR,
14091 TREE_TYPE (tem), tem,
14092 fold_convert_loc (loc,
14093 TREE_TYPE (tem),
14094 arg1)));
14097 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14098 already handled above. */
14099 if (TREE_CODE (arg0) == BIT_AND_EXPR
14100 && integer_onep (TREE_OPERAND (arg0, 1))
14101 && integer_zerop (op2)
14102 && integer_pow2p (arg1))
14104 tree tem = TREE_OPERAND (arg0, 0);
14105 STRIP_NOPS (tem);
14106 if (TREE_CODE (tem) == RSHIFT_EXPR
14107 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14108 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14109 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14110 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14111 TREE_OPERAND (tem, 0), arg1);
14114 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14115 is probably obsolete because the first operand should be a
14116 truth value (that's why we have the two cases above), but let's
14117 leave it in until we can confirm this for all front-ends. */
14118 if (integer_zerop (op2)
14119 && TREE_CODE (arg0) == NE_EXPR
14120 && integer_zerop (TREE_OPERAND (arg0, 1))
14121 && integer_pow2p (arg1)
14122 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14123 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14124 arg1, OEP_ONLY_CONST))
14125 return pedantic_non_lvalue_loc (loc,
14126 fold_convert_loc (loc, type,
14127 TREE_OPERAND (arg0, 0)));
14129 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14130 if (integer_zerop (op2)
14131 && truth_value_p (TREE_CODE (arg0))
14132 && truth_value_p (TREE_CODE (arg1)))
14133 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14134 fold_convert_loc (loc, type, arg0),
14135 arg1);
14137 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14138 if (integer_onep (op2)
14139 && truth_value_p (TREE_CODE (arg0))
14140 && truth_value_p (TREE_CODE (arg1)))
14142 location_t loc0 = expr_location_or (arg0, loc);
14143 /* Only perform transformation if ARG0 is easily inverted. */
14144 tem = fold_truth_not_expr (loc0, arg0);
14145 if (tem)
14146 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14147 fold_convert_loc (loc, type, tem),
14148 arg1);
14151 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14152 if (integer_zerop (arg1)
14153 && truth_value_p (TREE_CODE (arg0))
14154 && truth_value_p (TREE_CODE (op2)))
14156 location_t loc0 = expr_location_or (arg0, loc);
14157 /* Only perform transformation if ARG0 is easily inverted. */
14158 tem = fold_truth_not_expr (loc0, arg0);
14159 if (tem)
14160 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14161 fold_convert_loc (loc, type, tem),
14162 op2);
14165 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14166 if (integer_onep (arg1)
14167 && truth_value_p (TREE_CODE (arg0))
14168 && truth_value_p (TREE_CODE (op2)))
14169 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14170 fold_convert_loc (loc, type, arg0),
14171 op2);
14173 return NULL_TREE;
14175 case VEC_COND_EXPR:
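/* Only wholesale selections are folded here: a constant all-ones mask
yields op1 and a constant all-zeros mask yields op2; per-element
blending is left alone. */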
14176 if (TREE_CODE (arg0) == VECTOR_CST)
14178 if (integer_all_onesp (arg0) && !TREE_SIDE_EFFECTS (op2))
14179 return pedantic_non_lvalue_loc (loc, op1);
14180 if (integer_zerop (arg0) && !TREE_SIDE_EFFECTS (op1))
14181 return pedantic_non_lvalue_loc (loc, op2);
14183 return NULL_TREE;
14185 case CALL_EXPR:
14186 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14187 of fold_ternary on them. */
14188 gcc_unreachable ();
14190 case BIT_FIELD_REF:
14191 if ((TREE_CODE (arg0) == VECTOR_CST
14192 || (TREE_CODE (arg0) == CONSTRUCTOR
14193 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14194 && (type == TREE_TYPE (TREE_TYPE (arg0))
14195 || (TREE_CODE (type) == VECTOR_TYPE
14196 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14198 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14199 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14200 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14201 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
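/* E.g. for a V4SI operand (four 32-bit lanes), BIT_FIELD_REF <v, 64, 64>
selects lanes 2 and 3 and folds to a two-element vector, while a 32-bit
reference at a lane-aligned position folds to the single element. */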
14203 if (n != 0
14204 && (idx % width) == 0
14205 && (n % width) == 0
14206 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14208 idx = idx / width;
14209 n = n / width;
14211 if (TREE_CODE (arg0) == VECTOR_CST)
14213 if (n == 1)
14214 return VECTOR_CST_ELT (arg0, idx);
14216 tree *vals = XALLOCAVEC (tree, n);
14217 for (unsigned i = 0; i < n; ++i)
14218 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14219 return build_vector (type, vals);
14222 /* Constructor elements can be subvectors. */
14223 unsigned HOST_WIDE_INT k = 1;
14224 if (CONSTRUCTOR_NELTS (arg0) != 0)
14226 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14227 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14228 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14231 /* We keep an exact subset of the constructor elements. */
14232 if ((idx % k) == 0 && (n % k) == 0)
14234 if (CONSTRUCTOR_NELTS (arg0) == 0)
14235 return build_constructor (type, NULL);
14236 idx /= k;
14237 n /= k;
14238 if (n == 1)
14240 if (idx < CONSTRUCTOR_NELTS (arg0))
14241 return CONSTRUCTOR_ELT (arg0, idx)->value;
14242 return build_zero_cst (type);
14245 vec<constructor_elt, va_gc> *vals;
14246 vec_alloc (vals, n);
14247 for (unsigned i = 0;
14248 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14249 ++i)
14250 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14251 CONSTRUCTOR_ELT
14252 (arg0, idx + i)->value);
14253 return build_constructor (type, vals);
14255 /* The bitfield references a single constructor element. */
14256 else if (idx + n <= (idx / k + 1) * k)
14258 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14259 return build_zero_cst (type);
14260 else if (n == k)
14261 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14262 else
14263 return fold_build3_loc (loc, code, type,
14264 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14265 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14270 /* A bit-field-ref that references the full argument can be stripped. */
14271 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14272 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14273 && integer_zerop (op2))
14274 return fold_convert_loc (loc, type, arg0);
14276 /* On constants we can use native encode/interpret to constant
14277 fold (nearly) all BIT_FIELD_REFs. */
14278 if (CONSTANT_CLASS_P (arg0)
14279 && can_native_interpret_type_p (type)
14280 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14281 /* This limitation should not be necessary; we just need to
14282 round this up to the mode size. */
14283 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14284 /* Need bit-shifting of the buffer to relax the following. */
14285 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14287 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14288 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14289 unsigned HOST_WIDE_INT clen;
14290 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14291 /* ??? We cannot tell native_encode_expr to start at
14292 some random byte only. So limit ourselves to a reasonable
14293 amount of work. */
14294 if (clen <= 4096)
14296 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14297 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14298 if (len > 0
14299 && len * BITS_PER_UNIT >= bitpos + bitsize)
14301 tree v = native_interpret_expr (type,
14302 b + bitpos / BITS_PER_UNIT,
14303 bitsize / BITS_PER_UNIT);
14304 if (v)
14305 return v;
14310 return NULL_TREE;
14312 case FMA_EXPR:
14313 /* For integers we can decompose the FMA if possible. */
14314 if (TREE_CODE (arg0) == INTEGER_CST
14315 && TREE_CODE (arg1) == INTEGER_CST)
14316 return fold_build2_loc (loc, PLUS_EXPR, type,
14317 const_binop (MULT_EXPR, arg0, arg1), arg2);
14318 if (integer_zerop (arg2))
14319 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14321 return fold_fma (loc, type, arg0, arg1, arg2);
14323 case VEC_PERM_EXPR:
14324 if (TREE_CODE (arg2) == VECTOR_CST)
14326 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14327 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14328 tree t;
14329 bool need_mask_canon = false;
14330 bool all_in_vec0 = true;
14331 bool all_in_vec1 = true;
14332 bool maybe_identity = true;
14333 bool single_arg = (op0 == op1);
14334 bool changed = false;
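/* Each selector element indexes the virtual concatenation of op0 and
op1 (op0 alone when both operands are the same vector), so excess bits
are masked off. Track whether every index stays within one input, in
which case a single operand suffices, and whether the selector is the
identity permutation. */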
14336 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14337 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14338 for (i = 0; i < nelts; i++)
14340 tree val = VECTOR_CST_ELT (arg2, i);
14341 if (TREE_CODE (val) != INTEGER_CST)
14342 return NULL_TREE;
14344 sel[i] = TREE_INT_CST_LOW (val) & mask;
14345 if (TREE_INT_CST_HIGH (val)
14346 || ((unsigned HOST_WIDE_INT)
14347 TREE_INT_CST_LOW (val) != sel[i]))
14348 need_mask_canon = true;
14350 if (sel[i] < nelts)
14351 all_in_vec1 = false;
14352 else
14353 all_in_vec0 = false;
14355 if ((sel[i] & (nelts-1)) != i)
14356 maybe_identity = false;
14359 if (maybe_identity)
14361 if (all_in_vec0)
14362 return op0;
14363 if (all_in_vec1)
14364 return op1;
14367 if (all_in_vec0)
14368 op1 = op0;
14369 else if (all_in_vec1)
14371 op0 = op1;
14372 for (i = 0; i < nelts; i++)
14373 sel[i] -= nelts;
14374 need_mask_canon = true;
14377 if ((TREE_CODE (op0) == VECTOR_CST
14378 || TREE_CODE (op0) == CONSTRUCTOR)
14379 && (TREE_CODE (op1) == VECTOR_CST
14380 || TREE_CODE (op1) == CONSTRUCTOR))
14382 t = fold_vec_perm (type, op0, op1, sel);
14383 if (t != NULL_TREE)
14384 return t;
14387 if (op0 == op1 && !single_arg)
14388 changed = true;
14390 if (need_mask_canon && arg2 == op2)
14392 tree *tsel = XALLOCAVEC (tree, nelts);
14393 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14394 for (i = 0; i < nelts; i++)
14395 tsel[i] = build_int_cst (eltype, sel[i]);
14396 op2 = build_vector (TREE_TYPE (arg2), tsel);
14397 changed = true;
14400 if (changed)
14401 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14403 return NULL_TREE;
14405 default:
14406 return NULL_TREE;
14407 } /* switch (code) */
14410 /* Perform constant folding and related simplification of EXPR.
14411 The related simplifications include x*1 => x, x*0 => 0, etc.,
14412 and application of the associative law.
14413 NOP_EXPR conversions may be removed freely (as long as we
14414 are careful not to change the type of the overall expression).
14415 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14416 but we can constant-fold them if they have constant operands. */
14418 #ifdef ENABLE_FOLD_CHECKING
14419 # define fold(x) fold_1 (x)
14420 static tree fold_1 (tree);
14421 static
14422 #endif
14423 tree
14424 fold (tree expr)
14426 const tree t = expr;
14427 enum tree_code code = TREE_CODE (t);
14428 enum tree_code_class kind = TREE_CODE_CLASS (code);
14429 tree tem;
14430 location_t loc = EXPR_LOCATION (expr);
14432 /* Return right away if a constant. */
14433 if (kind == tcc_constant)
14434 return t;
14436 /* CALL_EXPR-like objects with variable numbers of operands are
14437 treated specially. */
14438 if (kind == tcc_vl_exp)
14440 if (code == CALL_EXPR)
14442 tem = fold_call_expr (loc, expr, false);
14443 return tem ? tem : expr;
14445 return expr;
14448 if (IS_EXPR_CODE_CLASS (kind))
14450 tree type = TREE_TYPE (t);
14451 tree op0, op1, op2;
14453 switch (TREE_CODE_LENGTH (code))
14455 case 1:
14456 op0 = TREE_OPERAND (t, 0);
14457 tem = fold_unary_loc (loc, code, type, op0);
14458 return tem ? tem : expr;
14459 case 2:
14460 op0 = TREE_OPERAND (t, 0);
14461 op1 = TREE_OPERAND (t, 1);
14462 tem = fold_binary_loc (loc, code, type, op0, op1);
14463 return tem ? tem : expr;
14464 case 3:
14465 op0 = TREE_OPERAND (t, 0);
14466 op1 = TREE_OPERAND (t, 1);
14467 op2 = TREE_OPERAND (t, 2);
14468 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14469 return tem ? tem : expr;
14470 default:
14471 break;
14475 switch (code)
14477 case ARRAY_REF:
14479 tree op0 = TREE_OPERAND (t, 0);
14480 tree op1 = TREE_OPERAND (t, 1);
14482 if (TREE_CODE (op1) == INTEGER_CST
14483 && TREE_CODE (op0) == CONSTRUCTOR
14484 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14486 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14487 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14488 unsigned HOST_WIDE_INT begin = 0;
14490 /* Find a matching index by means of a binary search. */
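/* The binary search assumes the constructor's index list is sorted; an
index is either an INTEGER_CST or a RANGE_EXPR coming from a
designator such as `[1 ... 5] = value', hence the comparisons against
both range bounds. */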
14491 while (begin != end)
14493 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14494 tree index = (*elts)[middle].index;
14496 if (TREE_CODE (index) == INTEGER_CST
14497 && tree_int_cst_lt (index, op1))
14498 begin = middle + 1;
14499 else if (TREE_CODE (index) == INTEGER_CST
14500 && tree_int_cst_lt (op1, index))
14501 end = middle;
14502 else if (TREE_CODE (index) == RANGE_EXPR
14503 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14504 begin = middle + 1;
14505 else if (TREE_CODE (index) == RANGE_EXPR
14506 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14507 end = middle;
14508 else
14509 return (*elts)[middle].value;
14513 return t;
14516 /* Return a VECTOR_CST if possible. */
14517 case CONSTRUCTOR:
14519 tree type = TREE_TYPE (t);
14520 if (TREE_CODE (type) != VECTOR_TYPE)
14521 return t;
14523 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14524 unsigned HOST_WIDE_INT idx, pos = 0;
14525 tree value;
14527 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14529 if (!CONSTANT_CLASS_P (value))
14530 return t;
14531 if (TREE_CODE (value) == VECTOR_CST)
14533 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14534 vec[pos++] = VECTOR_CST_ELT (value, i);
14536 else
14537 vec[pos++] = value;
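/* Elements not covered by the CONSTRUCTOR are implicitly zero, so pad
the remainder with zero constants: e.g. a V4SI constructor {1, 2}
becomes the VECTOR_CST {1, 2, 0, 0}. */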
14539 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14540 vec[pos] = build_zero_cst (TREE_TYPE (type));
14542 return build_vector (type, vec);
14545 case CONST_DECL:
14546 return fold (DECL_INITIAL (t));
14548 default:
14549 return t;
14550 } /* switch (code) */
14553 #ifdef ENABLE_FOLD_CHECKING
14554 #undef fold
14556 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14557 hash_table <pointer_hash <tree_node> >);
14558 static void fold_check_failed (const_tree, const_tree);
14559 void print_fold_checksum (const_tree);
14561 /* When --enable-checking=fold, compute a digest of EXPR before
14562 and after the actual fold call, to check that fold did not
14563 accidentally change the original expr. */
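/* The hash table records nodes already visited, so shared subtrees are
hashed only once and walks through type pointers terminate. Fields
that fold may legitimately update, such as DECL_ASSEMBLER_NAME and the
type caches, are masked out by fold_checksum_tree before hashing. */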
14565 tree
14566 fold (tree expr)
14568 tree ret;
14569 struct md5_ctx ctx;
14570 unsigned char checksum_before[16], checksum_after[16];
14571 hash_table <pointer_hash <tree_node> > ht;
14573 ht.create (32);
14574 md5_init_ctx (&ctx);
14575 fold_checksum_tree (expr, &ctx, ht);
14576 md5_finish_ctx (&ctx, checksum_before);
14577 ht.empty ();
14579 ret = fold_1 (expr);
14581 md5_init_ctx (&ctx);
14582 fold_checksum_tree (expr, &ctx, ht);
14583 md5_finish_ctx (&ctx, checksum_after);
14584 ht.dispose ();
14586 if (memcmp (checksum_before, checksum_after, 16))
14587 fold_check_failed (expr, ret);
14589 return ret;
14592 void
14593 print_fold_checksum (const_tree expr)
14595 struct md5_ctx ctx;
14596 unsigned char checksum[16], cnt;
14597 hash_table <pointer_hash <tree_node> > ht;
14599 ht.create (32);
14600 md5_init_ctx (&ctx);
14601 fold_checksum_tree (expr, &ctx, ht);
14602 md5_finish_ctx (&ctx, checksum);
14603 ht.dispose ();
14604 for (cnt = 0; cnt < 16; ++cnt)
14605 fprintf (stderr, "%02x", checksum[cnt]);
14606 putc ('\n', stderr);
14609 static void
14610 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14612 internal_error ("fold check: original tree changed by fold");
14615 static void
14616 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14617 hash_table <pointer_hash <tree_node> > ht)
14619 tree_node **slot;
14620 enum tree_code code;
14621 union tree_node buf;
14622 int i, len;
14624 recursive_label:
14625 if (expr == NULL)
14626 return;
14627 slot = ht.find_slot (expr, INSERT);
14628 if (*slot != NULL)
14629 return;
14630 *slot = CONST_CAST_TREE (expr);
14631 code = TREE_CODE (expr);
14632 if (TREE_CODE_CLASS (code) == tcc_declaration
14633 && DECL_ASSEMBLER_NAME_SET_P (expr))
14635 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14636 memcpy ((char *) &buf, expr, tree_size (expr));
14637 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14638 expr = (tree) &buf;
14640 else if (TREE_CODE_CLASS (code) == tcc_type
14641 && (TYPE_POINTER_TO (expr)
14642 || TYPE_REFERENCE_TO (expr)
14643 || TYPE_CACHED_VALUES_P (expr)
14644 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14645 || TYPE_NEXT_VARIANT (expr)))
14647 /* Allow these fields to be modified. */
14648 tree tmp;
14649 memcpy ((char *) &buf, expr, tree_size (expr));
14650 expr = tmp = (tree) &buf;
14651 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14652 TYPE_POINTER_TO (tmp) = NULL;
14653 TYPE_REFERENCE_TO (tmp) = NULL;
14654 TYPE_NEXT_VARIANT (tmp) = NULL;
14655 if (TYPE_CACHED_VALUES_P (tmp))
14657 TYPE_CACHED_VALUES_P (tmp) = 0;
14658 TYPE_CACHED_VALUES (tmp) = NULL;
14661 md5_process_bytes (expr, tree_size (expr), ctx);
14662 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14663 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14664 if (TREE_CODE_CLASS (code) != tcc_type
14665 && TREE_CODE_CLASS (code) != tcc_declaration
14666 && code != TREE_LIST
14667 && code != SSA_NAME
14668 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14669 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14670 switch (TREE_CODE_CLASS (code))
14672 case tcc_constant:
14673 switch (code)
14675 case STRING_CST:
14676 md5_process_bytes (TREE_STRING_POINTER (expr),
14677 TREE_STRING_LENGTH (expr), ctx);
14678 break;
14679 case COMPLEX_CST:
14680 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14681 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14682 break;
14683 case VECTOR_CST:
14684 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14685 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14686 break;
14687 default:
14688 break;
14690 break;
14691 case tcc_exceptional:
14692 switch (code)
14694 case TREE_LIST:
14695 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14696 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14697 expr = TREE_CHAIN (expr);
14698 goto recursive_label;
14699 break;
14700 case TREE_VEC:
14701 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14702 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14703 break;
14704 default:
14705 break;
14707 break;
14708 case tcc_expression:
14709 case tcc_reference:
14710 case tcc_comparison:
14711 case tcc_unary:
14712 case tcc_binary:
14713 case tcc_statement:
14714 case tcc_vl_exp:
14715 len = TREE_OPERAND_LENGTH (expr);
14716 for (i = 0; i < len; ++i)
14717 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14718 break;
14719 case tcc_declaration:
14720 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14721 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14722 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14724 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14725 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14726 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14727 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14728 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14730 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14731 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14733 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14735 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14736 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14737 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14739 break;
14740 case tcc_type:
14741 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14742 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14743 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14744 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14745 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14746 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14747 if (INTEGRAL_TYPE_P (expr)
14748 || SCALAR_FLOAT_TYPE_P (expr))
14750 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14751 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14753 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14754 if (TREE_CODE (expr) == RECORD_TYPE
14755 || TREE_CODE (expr) == UNION_TYPE
14756 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14757 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14758 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14759 break;
14760 default:
14761 break;
14765 /* Helper function for outputting the checksum of a tree T. When
14766 debugging with gdb, you can "define mynext" to be "next" followed
14767 by "call debug_fold_checksum (op0)", then just trace down till the
14768 outputs differ. */
14770 DEBUG_FUNCTION void
14771 debug_fold_checksum (const_tree t)
14773 int i;
14774 unsigned char checksum[16];
14775 struct md5_ctx ctx;
14776 hash_table <pointer_hash <tree_node> > ht;
14777 ht.create (32);
14779 md5_init_ctx (&ctx);
14780 fold_checksum_tree (t, &ctx, ht);
14781 md5_finish_ctx (&ctx, checksum);
14782 ht.empty ();
14784 for (i = 0; i < 16; i++)
14785 fprintf (stderr, "%d ", checksum[i]);
14787 fprintf (stderr, "\n");
14790 #endif
14792 /* Fold a unary tree expression with code CODE of type TYPE with an
14793 operand OP0. LOC is the location of the resulting expression.
14794 Return a folded expression if successful. Otherwise, return a tree
14795 expression with code CODE of type TYPE with an operand OP0. */
14797 tree
14798 fold_build1_stat_loc (location_t loc,
14799 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14801 tree tem;
14802 #ifdef ENABLE_FOLD_CHECKING
14803 unsigned char checksum_before[16], checksum_after[16];
14804 struct md5_ctx ctx;
14805 hash_table <pointer_hash <tree_node> > ht;
14807 ht.create (32);
14808 md5_init_ctx (&ctx);
14809 fold_checksum_tree (op0, &ctx, ht);
14810 md5_finish_ctx (&ctx, checksum_before);
14811 ht.empty ();
14812 #endif
14814 tem = fold_unary_loc (loc, code, type, op0);
14815 if (!tem)
14816 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14818 #ifdef ENABLE_FOLD_CHECKING
14819 md5_init_ctx (&ctx);
14820 fold_checksum_tree (op0, &ctx, ht);
14821 md5_finish_ctx (&ctx, checksum_after);
14822 ht.dispose ();
14824 if (memcmp (checksum_before, checksum_after, 16))
14825 fold_check_failed (op0, tem);
14826 #endif
14827 return tem;
14830 /* Fold a binary tree expression with code CODE of type TYPE with
14831 operands OP0 and OP1. LOC is the location of the resulting
14832 expression. Return a folded expression if successful. Otherwise,
14833 return a tree expression with code CODE of type TYPE with operands
14834 OP0 and OP1. */
14836 tree
14837 fold_build2_stat_loc (location_t loc,
14838 enum tree_code code, tree type, tree op0, tree op1
14839 MEM_STAT_DECL)
14841 tree tem;
14842 #ifdef ENABLE_FOLD_CHECKING
14843 unsigned char checksum_before_op0[16],
14844 checksum_before_op1[16],
14845 checksum_after_op0[16],
14846 checksum_after_op1[16];
14847 struct md5_ctx ctx;
14848 hash_table <pointer_hash <tree_node> > ht;
14850 ht.create (32);
14851 md5_init_ctx (&ctx);
14852 fold_checksum_tree (op0, &ctx, ht);
14853 md5_finish_ctx (&ctx, checksum_before_op0);
14854 ht.empty ();
14856 md5_init_ctx (&ctx);
14857 fold_checksum_tree (op1, &ctx, ht);
14858 md5_finish_ctx (&ctx, checksum_before_op1);
14859 ht.empty ();
14860 #endif
14862 tem = fold_binary_loc (loc, code, type, op0, op1);
14863 if (!tem)
14864 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14866 #ifdef ENABLE_FOLD_CHECKING
14867 md5_init_ctx (&ctx);
14868 fold_checksum_tree (op0, &ctx, ht);
14869 md5_finish_ctx (&ctx, checksum_after_op0);
14870 ht.empty ();
14872 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14873 fold_check_failed (op0, tem);
14875 md5_init_ctx (&ctx);
14876 fold_checksum_tree (op1, &ctx, ht);
14877 md5_finish_ctx (&ctx, checksum_after_op1);
14878 ht.dispose ();
14880 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14881 fold_check_failed (op1, tem);
14882 #endif
14883 return tem;
14886 /* Fold a ternary tree expression with code CODE of type TYPE with
14887 operands OP0, OP1, and OP2. Return a folded expression if
14888 successful. Otherwise, return a tree expression with code CODE of
14889 type TYPE with operands OP0, OP1, and OP2. */
14891 tree
14892 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14893 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14895 tree tem;
14896 #ifdef ENABLE_FOLD_CHECKING
14897 unsigned char checksum_before_op0[16],
14898 checksum_before_op1[16],
14899 checksum_before_op2[16],
14900 checksum_after_op0[16],
14901 checksum_after_op1[16],
14902 checksum_after_op2[16];
14903 struct md5_ctx ctx;
14904 hash_table <pointer_hash <tree_node> > ht;
14906 ht.create (32);
14907 md5_init_ctx (&ctx);
14908 fold_checksum_tree (op0, &ctx, ht);
14909 md5_finish_ctx (&ctx, checksum_before_op0);
14910 ht.empty ();
14912 md5_init_ctx (&ctx);
14913 fold_checksum_tree (op1, &ctx, ht);
14914 md5_finish_ctx (&ctx, checksum_before_op1);
14915 ht.empty ();
14917 md5_init_ctx (&ctx);
14918 fold_checksum_tree (op2, &ctx, ht);
14919 md5_finish_ctx (&ctx, checksum_before_op2);
14920 ht.empty ();
14921 #endif
14923 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14924 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14925 if (!tem)
14926 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14928 #ifdef ENABLE_FOLD_CHECKING
14929 md5_init_ctx (&ctx);
14930 fold_checksum_tree (op0, &ctx, ht);
14931 md5_finish_ctx (&ctx, checksum_after_op0);
14932 ht.empty ();
14934 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14935 fold_check_failed (op0, tem);
14937 md5_init_ctx (&ctx);
14938 fold_checksum_tree (op1, &ctx, ht);
14939 md5_finish_ctx (&ctx, checksum_after_op1);
14940 ht.empty ();
14942 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14943 fold_check_failed (op1, tem);
14945 md5_init_ctx (&ctx);
14946 fold_checksum_tree (op2, &ctx, ht);
14947 md5_finish_ctx (&ctx, checksum_after_op2);
14948 ht.dispose ();
14950 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14951 fold_check_failed (op2, tem);
14952 #endif
14953 return tem;
14956 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14957 arguments in ARGARRAY, and a null static chain.
14958 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14959 of type TYPE from the given operands as constructed by build_call_array. */
14961 tree
14962 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14963 int nargs, tree *argarray)
14965 tree tem;
14966 #ifdef ENABLE_FOLD_CHECKING
14967 unsigned char checksum_before_fn[16],
14968 checksum_before_arglist[16],
14969 checksum_after_fn[16],
14970 checksum_after_arglist[16];
14971 struct md5_ctx ctx;
14972 hash_table <pointer_hash <tree_node> > ht;
14973 int i;
14975 ht.create (32);
14976 md5_init_ctx (&ctx);
14977 fold_checksum_tree (fn, &ctx, ht);
14978 md5_finish_ctx (&ctx, checksum_before_fn);
14979 ht.empty ();
14981 md5_init_ctx (&ctx);
14982 for (i = 0; i < nargs; i++)
14983 fold_checksum_tree (argarray[i], &ctx, ht);
14984 md5_finish_ctx (&ctx, checksum_before_arglist);
14985 ht.empty ();
14986 #endif
14988 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14990 #ifdef ENABLE_FOLD_CHECKING
14991 md5_init_ctx (&ctx);
14992 fold_checksum_tree (fn, &ctx, ht);
14993 md5_finish_ctx (&ctx, checksum_after_fn);
14994 ht.empty ();
14996 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14997 fold_check_failed (fn, tem);
14999 md5_init_ctx (&ctx);
15000 for (i = 0; i < nargs; i++)
15001 fold_checksum_tree (argarray[i], &ctx, ht);
15002 md5_finish_ctx (&ctx, checksum_after_arglist);
15003 ht.dispose ();
15005 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15006 fold_check_failed (NULL_TREE, tem);
15007 #endif
15008 return tem;
15011 /* Perform constant folding and related simplification of initializer
15012 expression EXPR. These behave identically to "fold_buildN" but ignore
15013 potential run-time traps and exceptions that fold must preserve. */
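/* For example, in a static initializer a division such as `1.0 / 0.0'
may be folded even under -ftrapping-math, because the expression is
evaluated at compile time and can never trap at run time. */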
15015 #define START_FOLD_INIT \
15016 int saved_signaling_nans = flag_signaling_nans;\
15017 int saved_trapping_math = flag_trapping_math;\
15018 int saved_rounding_math = flag_rounding_math;\
15019 int saved_trapv = flag_trapv;\
15020 int saved_folding_initializer = folding_initializer;\
15021 flag_signaling_nans = 0;\
15022 flag_trapping_math = 0;\
15023 flag_rounding_math = 0;\
15024 flag_trapv = 0;\
15025 folding_initializer = 1;
15027 #define END_FOLD_INIT \
15028 flag_signaling_nans = saved_signaling_nans;\
15029 flag_trapping_math = saved_trapping_math;\
15030 flag_rounding_math = saved_rounding_math;\
15031 flag_trapv = saved_trapv;\
15032 folding_initializer = saved_folding_initializer;
15034 tree
15035 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15036 tree type, tree op)
15038 tree result;
15039 START_FOLD_INIT;
15041 result = fold_build1_loc (loc, code, type, op);
15043 END_FOLD_INIT;
15044 return result;
15047 tree
15048 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15049 tree type, tree op0, tree op1)
15051 tree result;
15052 START_FOLD_INIT;
15054 result = fold_build2_loc (loc, code, type, op0, op1);
15056 END_FOLD_INIT;
15057 return result;
15060 tree
15061 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15062 tree type, tree op0, tree op1, tree op2)
15064 tree result;
15065 START_FOLD_INIT;
15067 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15069 END_FOLD_INIT;
15070 return result;
15073 tree
15074 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15075 int nargs, tree *argarray)
15077 tree result;
15078 START_FOLD_INIT;
15080 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15082 END_FOLD_INIT;
15083 return result;
15086 #undef START_FOLD_INIT
15087 #undef END_FOLD_INIT
15089 /* Determine if the first argument is a multiple of the second argument.
15090 Return 0 if it is not, or if we cannot easily determine it to be.
15092 An example of the sort of thing we care about (at this point; this routine
15093 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15094 fold cases do now) is discovering that
15096 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15098 is a multiple of
15100 SAVE_EXPR (J * 8)
15102 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15104 This code also handles discovering that
15106 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15108 is a multiple of 8 so we don't have to worry about dealing with a
15109 possible remainder.
15111 Note that we *look* inside a SAVE_EXPR only to determine how it was
15112 calculated; it is not safe for fold to do much of anything else with the
15113 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15114 at run time. For example, the latter example above *cannot* be implemented
15115 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15116 evaluation time of the original SAVE_EXPR is not necessarily the same at
15117 the time the new expression is evaluated. The only optimization of this
15118 sort that would be valid is changing
15120 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15122 divided by 8 to
15124 SAVE_EXPR (I) * SAVE_EXPR (J)
15126 (where the same SAVE_EXPR (J) is used in the original and the
15127 transformed version). */
15129 int
15130 multiple_of_p (tree type, const_tree top, const_tree bottom)
15132 if (operand_equal_p (top, bottom, 0))
15133 return 1;
15135 if (TREE_CODE (type) != INTEGER_TYPE)
15136 return 0;
15138 switch (TREE_CODE (top))
15140 case BIT_AND_EXPR:
15141 /* Bitwise and provides a power of two multiple. If the mask is
15142 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15143 if (!integer_pow2p (bottom))
15144 return 0;
15145 /* FALLTHRU */
15147 case MULT_EXPR:
15148 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15149 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15151 case PLUS_EXPR:
15152 case MINUS_EXPR:
15153 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15154 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15156 case LSHIFT_EXPR:
15157 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15159 tree op1, t1;
15161 op1 = TREE_OPERAND (top, 1);
15162 /* const_binop may not detect overflow correctly,
15163 so check for it explicitly here. */
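/* E.g. TOP = `y << 3' is a multiple of BOTTOM = 8: fold `1 << 3' to 8
and recurse on it, after checking that the shift count is small enough
that the power of two does not overflow. */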
15164 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15165 > TREE_INT_CST_LOW (op1)
15166 && TREE_INT_CST_HIGH (op1) == 0
15167 && 0 != (t1 = fold_convert (type,
15168 const_binop (LSHIFT_EXPR,
15169 size_one_node,
15170 op1)))
15171 && !TREE_OVERFLOW (t1))
15172 return multiple_of_p (type, t1, bottom);
15174 return 0;
15176 case NOP_EXPR:
15177 /* Can't handle conversions from non-integral or wider integral type. */
15178 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15179 || (TYPE_PRECISION (type)
15180 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15181 return 0;
15183 /* ... fall through ... */
15185 case SAVE_EXPR:
15186 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15188 case COND_EXPR:
15189 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15190 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15192 case INTEGER_CST:
15193 if (TREE_CODE (bottom) != INTEGER_CST
15194 || integer_zerop (bottom)
15195 || (TYPE_UNSIGNED (type)
15196 && (tree_int_cst_sgn (top) < 0
15197 || tree_int_cst_sgn (bottom) < 0)))
15198 return 0;
15199 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15200 top, bottom));
15202 default:
15203 return 0;
15207 /* Return true if CODE or TYPE is known to be non-negative. */
15209 static bool
15210 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15212 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15213 && truth_value_p (code))
15214 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15215 have a signed:1 type (where the values are -1 and 0). */
15216 return true;
15217 return false;
15220 /* Return true if (CODE OP0) is known to be non-negative. If the return
15221 value is based on the assumption that signed overflow is undefined,
15222 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15223 *STRICT_OVERFLOW_P. */
15225 bool
15226 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15227 bool *strict_overflow_p)
15229 if (TYPE_UNSIGNED (type))
15230 return true;
15232 switch (code)
15234 case ABS_EXPR:
15235 /* We can't return 1 if flag_wrapv is set because
15236 ABS_EXPR<INT_MIN> = INT_MIN. */
15237 if (!INTEGRAL_TYPE_P (type))
15238 return true;
15239 if (TYPE_OVERFLOW_UNDEFINED (type))
15241 *strict_overflow_p = true;
15242 return true;
15244 break;
15246 case NON_LVALUE_EXPR:
15247 case FLOAT_EXPR:
15248 case FIX_TRUNC_EXPR:
15249 return tree_expr_nonnegative_warnv_p (op0,
15250 strict_overflow_p);
15252 case NOP_EXPR:
15254 tree inner_type = TREE_TYPE (op0);
15255 tree outer_type = type;
15257 if (TREE_CODE (outer_type) == REAL_TYPE)
15259 if (TREE_CODE (inner_type) == REAL_TYPE)
15260 return tree_expr_nonnegative_warnv_p (op0,
15261 strict_overflow_p);
15262 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15264 if (TYPE_UNSIGNED (inner_type))
15265 return true;
15266 return tree_expr_nonnegative_warnv_p (op0,
15267 strict_overflow_p);
15270 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15272 if (TREE_CODE (inner_type) == REAL_TYPE)
15273 return tree_expr_nonnegative_warnv_p (op0,
15274 strict_overflow_p);
15275 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15276 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15277 && TYPE_UNSIGNED (inner_type);
15280 break;
15282 default:
15283 return tree_simple_nonnegative_warnv_p (code, type);
15286 /* We don't know the sign of `t', so be conservative and return false. */
15287 return false;
15290 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15291 value is based on the assumption that signed overflow is undefined,
15292 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15293 *STRICT_OVERFLOW_P. */
15295 bool
15296 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15297 tree op1, bool *strict_overflow_p)
15299 if (TYPE_UNSIGNED (type))
15300 return true;
15302 switch (code)
15304 case POINTER_PLUS_EXPR:
15305 case PLUS_EXPR:
15306 if (FLOAT_TYPE_P (type))
15307 return (tree_expr_nonnegative_warnv_p (op0,
15308 strict_overflow_p)
15309 && tree_expr_nonnegative_warnv_p (op1,
15310 strict_overflow_p));
15312 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15313 both unsigned and at least 2 bits shorter than the result. */
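/* E.g. two zero-extended 8-bit values added in a 32-bit type need at
most MAX (8, 8) + 1 = 9 bits, and 9 < 32, so the sum cannot reach the
sign bit. */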
15314 if (TREE_CODE (type) == INTEGER_TYPE
15315 && TREE_CODE (op0) == NOP_EXPR
15316 && TREE_CODE (op1) == NOP_EXPR)
15318 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15319 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15320 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15321 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15323 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15324 TYPE_PRECISION (inner2)) + 1;
15325 return prec < TYPE_PRECISION (type);
15328 break;
15330 case MULT_EXPR:
15331 if (FLOAT_TYPE_P (type))
15333 /* x * x for floating point x is always non-negative. */
15334 if (operand_equal_p (op0, op1, 0))
15335 return true;
15336 return (tree_expr_nonnegative_warnv_p (op0,
15337 strict_overflow_p)
15338 && tree_expr_nonnegative_warnv_p (op1,
15339 strict_overflow_p));
15342 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15343 both unsigned and their combined width is less than the result's. */
15344 if (TREE_CODE (type) == INTEGER_TYPE
15345 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15346 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15348 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15349 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15350 : TREE_TYPE (op0);
15351 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15352 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15353 : TREE_TYPE (op1);
15355 bool unsigned0 = TYPE_UNSIGNED (inner0);
15356 bool unsigned1 = TYPE_UNSIGNED (inner1);
15358 if (TREE_CODE (op0) == INTEGER_CST)
15359 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15361 if (TREE_CODE (op1) == INTEGER_CST)
15362 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15364 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15365 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15367 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15368 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15369 : TYPE_PRECISION (inner0);
15371 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15372 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15373 : TYPE_PRECISION (inner1);
15375 return precision0 + precision1 < TYPE_PRECISION (type);
15378 return false;
15380 case BIT_AND_EXPR:
15381 case MAX_EXPR:
15382 return (tree_expr_nonnegative_warnv_p (op0,
15383 strict_overflow_p)
15384 || tree_expr_nonnegative_warnv_p (op1,
15385 strict_overflow_p));
15387 case BIT_IOR_EXPR:
15388 case BIT_XOR_EXPR:
15389 case MIN_EXPR:
15390 case RDIV_EXPR:
15391 case TRUNC_DIV_EXPR:
15392 case CEIL_DIV_EXPR:
15393 case FLOOR_DIV_EXPR:
15394 case ROUND_DIV_EXPR:
15395 return (tree_expr_nonnegative_warnv_p (op0,
15396 strict_overflow_p)
15397 && tree_expr_nonnegative_warnv_p (op1,
15398 strict_overflow_p));
15400 case TRUNC_MOD_EXPR:
15401 case CEIL_MOD_EXPR:
15402 case FLOOR_MOD_EXPR:
15403 case ROUND_MOD_EXPR:
15404 return tree_expr_nonnegative_warnv_p (op0,
15405 strict_overflow_p);
15406 default:
15407 return tree_simple_nonnegative_warnv_p (code, type);
15410 /* We don't know the sign of `t', so be conservative and return false. */
15411 return false;
15414 /* Return true if T is known to be non-negative. If the return
15415 value is based on the assumption that signed overflow is undefined,
15416 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15417 *STRICT_OVERFLOW_P. */
15419 bool
15420 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15422 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15423 return true;
15425 switch (TREE_CODE (t))
15427 case INTEGER_CST:
15428 return tree_int_cst_sgn (t) >= 0;
15430 case REAL_CST:
15431 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15433 case FIXED_CST:
15434 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15436 case COND_EXPR:
15437 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15438 strict_overflow_p)
15439 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15440 strict_overflow_p));
15441 default:
15442 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15443 TREE_TYPE (t));
15445 /* We don't know the sign of `t', so be conservative and return false. */
15446 return false;
15449 /* Return true if T is known to be non-negative. If the return
15450 value is based on the assumption that signed overflow is undefined,
15451 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15452 *STRICT_OVERFLOW_P. */
15454 bool
15455 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15456 tree arg0, tree arg1, bool *strict_overflow_p)
15458 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15459 switch (DECL_FUNCTION_CODE (fndecl))
15461 CASE_FLT_FN (BUILT_IN_ACOS):
15462 CASE_FLT_FN (BUILT_IN_ACOSH):
15463 CASE_FLT_FN (BUILT_IN_CABS):
15464 CASE_FLT_FN (BUILT_IN_COSH):
15465 CASE_FLT_FN (BUILT_IN_ERFC):
15466 CASE_FLT_FN (BUILT_IN_EXP):
15467 CASE_FLT_FN (BUILT_IN_EXP10):
15468 CASE_FLT_FN (BUILT_IN_EXP2):
15469 CASE_FLT_FN (BUILT_IN_FABS):
15470 CASE_FLT_FN (BUILT_IN_FDIM):
15471 CASE_FLT_FN (BUILT_IN_HYPOT):
15472 CASE_FLT_FN (BUILT_IN_POW10):
15473 CASE_INT_FN (BUILT_IN_FFS):
15474 CASE_INT_FN (BUILT_IN_PARITY):
15475 CASE_INT_FN (BUILT_IN_POPCOUNT):
15476 case BUILT_IN_BSWAP32:
15477 case BUILT_IN_BSWAP64:
15478 /* Always true. */
15479 return true;
15481 CASE_FLT_FN (BUILT_IN_SQRT):
15482 /* sqrt(-0.0) is -0.0. */
15483 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15484 return true;
15485 return tree_expr_nonnegative_warnv_p (arg0,
15486 strict_overflow_p);
15488 CASE_FLT_FN (BUILT_IN_ASINH):
15489 CASE_FLT_FN (BUILT_IN_ATAN):
15490 CASE_FLT_FN (BUILT_IN_ATANH):
15491 CASE_FLT_FN (BUILT_IN_CBRT):
15492 CASE_FLT_FN (BUILT_IN_CEIL):
15493 CASE_FLT_FN (BUILT_IN_ERF):
15494 CASE_FLT_FN (BUILT_IN_EXPM1):
15495 CASE_FLT_FN (BUILT_IN_FLOOR):
15496 CASE_FLT_FN (BUILT_IN_FMOD):
15497 CASE_FLT_FN (BUILT_IN_FREXP):
15498 CASE_FLT_FN (BUILT_IN_ICEIL):
15499 CASE_FLT_FN (BUILT_IN_IFLOOR):
15500 CASE_FLT_FN (BUILT_IN_IRINT):
15501 CASE_FLT_FN (BUILT_IN_IROUND):
15502 CASE_FLT_FN (BUILT_IN_LCEIL):
15503 CASE_FLT_FN (BUILT_IN_LDEXP):
15504 CASE_FLT_FN (BUILT_IN_LFLOOR):
15505 CASE_FLT_FN (BUILT_IN_LLCEIL):
15506 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15507 CASE_FLT_FN (BUILT_IN_LLRINT):
15508 CASE_FLT_FN (BUILT_IN_LLROUND):
15509 CASE_FLT_FN (BUILT_IN_LRINT):
15510 CASE_FLT_FN (BUILT_IN_LROUND):
15511 CASE_FLT_FN (BUILT_IN_MODF):
15512 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15513 CASE_FLT_FN (BUILT_IN_RINT):
15514 CASE_FLT_FN (BUILT_IN_ROUND):
15515 CASE_FLT_FN (BUILT_IN_SCALB):
15516 CASE_FLT_FN (BUILT_IN_SCALBLN):
15517 CASE_FLT_FN (BUILT_IN_SCALBN):
15518 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15519 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15520 CASE_FLT_FN (BUILT_IN_SINH):
15521 CASE_FLT_FN (BUILT_IN_TANH):
15522 CASE_FLT_FN (BUILT_IN_TRUNC):
15523 /* True if the 1st argument is nonnegative. */
15524 return tree_expr_nonnegative_warnv_p (arg0,
15525 strict_overflow_p);
15527 CASE_FLT_FN (BUILT_IN_FMAX):
15528 /* True if the 1st OR 2nd arguments are nonnegative. */
15529 return (tree_expr_nonnegative_warnv_p (arg0,
15530 strict_overflow_p)
15531 || (tree_expr_nonnegative_warnv_p (arg1,
15532 strict_overflow_p)));
15534 CASE_FLT_FN (BUILT_IN_FMIN):
15535 /* True if the 1st AND 2nd arguments are nonnegative. */
15536 return (tree_expr_nonnegative_warnv_p (arg0,
15537 strict_overflow_p)
15538 && (tree_expr_nonnegative_warnv_p (arg1,
15539 strict_overflow_p)));
15541 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15542 /* True if the 2nd argument is nonnegative. */
15543 return tree_expr_nonnegative_warnv_p (arg1,
15544 strict_overflow_p);
15546 CASE_FLT_FN (BUILT_IN_POWI):
15547 /* True if the 1st argument is nonnegative or the second
15548 argument is an even integer. */
15549 if (TREE_CODE (arg1) == INTEGER_CST
15550 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15551 return true;
15552 return tree_expr_nonnegative_warnv_p (arg0,
15553 strict_overflow_p);
15555 CASE_FLT_FN (BUILT_IN_POW):
15556 /* True if the 1st argument is nonnegative or the second
15557 argument is an even integer valued real. */
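/* E.g. pow (x, 2.0) is non-negative for every x, whereas pow (x, 3.0)
is only known non-negative when x is. The round trip through
real_to_integer and real_from_integer below verifies that ARG1 is
exactly an even integral value, not merely close to one. */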
15558 if (TREE_CODE (arg1) == REAL_CST)
15560 REAL_VALUE_TYPE c;
15561 HOST_WIDE_INT n;
15563 c = TREE_REAL_CST (arg1);
15564 n = real_to_integer (&c);
15565 if ((n & 1) == 0)
15567 REAL_VALUE_TYPE cint;
15568 real_from_integer (&cint, VOIDmode, n,
15569 n < 0 ? -1 : 0, 0);
15570 if (real_identical (&c, &cint))
15571 return true;
15574 return tree_expr_nonnegative_warnv_p (arg0,
15575 strict_overflow_p);
15577 default:
15578 break;
15580 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15581 type);
15584 /* Return true if T is known to be non-negative. If the return
15585 value is based on the assumption that signed overflow is undefined,
15586 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15587 *STRICT_OVERFLOW_P. */
15589 bool
15590 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15592 enum tree_code code = TREE_CODE (t);
15593 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15594 return true;
15596 switch (code)
15598 case TARGET_EXPR:
15600 tree temp = TARGET_EXPR_SLOT (t);
15601 t = TARGET_EXPR_INITIAL (t);
15603 /* If the initializer is non-void, then it's a normal expression
15604 that will be assigned to the slot. */
15605 if (!VOID_TYPE_P (t))
15606 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15608 /* Otherwise, the initializer sets the slot in some way. One common
15609 way is an assignment statement at the end of the initializer. */
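/* E.g. when the initializer ends with `slot = <expr>', walk past any
BIND_EXPRs, TRY blocks and STATEMENT_LISTs to that final MODIFY_EXPR
and test the assigned value. */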
15610 while (1)
15612 if (TREE_CODE (t) == BIND_EXPR)
15613 t = expr_last (BIND_EXPR_BODY (t));
15614 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15615 || TREE_CODE (t) == TRY_CATCH_EXPR)
15616 t = expr_last (TREE_OPERAND (t, 0));
15617 else if (TREE_CODE (t) == STATEMENT_LIST)
15618 t = expr_last (t);
15619 else
15620 break;
15622 if (TREE_CODE (t) == MODIFY_EXPR
15623 && TREE_OPERAND (t, 0) == temp)
15624 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15625 strict_overflow_p);
15627 return false;
15630 case CALL_EXPR:
15632 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15633 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15635 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15636 get_callee_fndecl (t),
15637 arg0,
15638 arg1,
15639 strict_overflow_p);
15641 case COMPOUND_EXPR:
15642 case MODIFY_EXPR:
15643 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15644 strict_overflow_p);
15645 case BIND_EXPR:
15646 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15647 strict_overflow_p);
15648 case SAVE_EXPR:
15649 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15650 strict_overflow_p);
15652 default:
15653 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15654 TREE_TYPE (t));
15657 /* We don't know the sign of `t', so be conservative and return false. */
15658 return false;
15661 /* Return true if T is known to be non-negative. If the return
15662 value is based on the assumption that signed overflow is undefined,
15663 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15664 *STRICT_OVERFLOW_P. */
15666 bool
15667 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15669 enum tree_code code;
15670 if (t == error_mark_node)
15671 return false;
15673 code = TREE_CODE (t);
15674 switch (TREE_CODE_CLASS (code))
15676 case tcc_binary:
15677 case tcc_comparison:
15678 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15679 TREE_TYPE (t),
15680 TREE_OPERAND (t, 0),
15681 TREE_OPERAND (t, 1),
15682 strict_overflow_p);
15684 case tcc_unary:
15685 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15686 TREE_TYPE (t),
15687 TREE_OPERAND (t, 0),
15688 strict_overflow_p);
15690 case tcc_constant:
15691 case tcc_declaration:
15692 case tcc_reference:
15693 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15695 default:
15696 break;
15699 switch (code)
15701 case TRUTH_AND_EXPR:
15702 case TRUTH_OR_EXPR:
15703 case TRUTH_XOR_EXPR:
15704 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15705 TREE_TYPE (t),
15706 TREE_OPERAND (t, 0),
15707 TREE_OPERAND (t, 1),
15708 strict_overflow_p);
15709 case TRUTH_NOT_EXPR:
15710 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15711 TREE_TYPE (t),
15712 TREE_OPERAND (t, 0),
15713 strict_overflow_p);
15715 case COND_EXPR:
15716 case CONSTRUCTOR:
15717 case OBJ_TYPE_REF:
15718 case ASSERT_EXPR:
15719 case ADDR_EXPR:
15720 case WITH_SIZE_EXPR:
15721 case SSA_NAME:
15722 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15724 default:
15725 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15729 /* Return true if `t' is known to be non-negative. Handle warnings
15730 about undefined signed overflow. */
15732 bool
15733 tree_expr_nonnegative_p (tree t)
15735 bool ret, strict_overflow_p;
15737 strict_overflow_p = false;
15738 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15739 if (strict_overflow_p)
15740 fold_overflow_warning (("assuming signed overflow does not occur when "
15741 "determining that expression is always "
15742 "non-negative"),
15743 WARN_STRICT_OVERFLOW_MISC);
15744 return ret;
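/* A hypothetical caller sketch for the *_warnv_p convention used
   throughout this file: seed STRICT_OVERFLOW_P with false, and if a
   positive answer relied on signed overflow being undefined, emit the
   -Wstrict-overflow note before acting on it, exactly as
   tree_expr_nonnegative_p does above.  The function name and warning
   text are illustrative only.  */

static bool ATTRIBUTE_UNUSED
example_known_nonnegative_p (tree op)
{
  bool strict_overflow_p = false;

  if (!tree_expr_nonnegative_warnv_p (op, &strict_overflow_p))
    return false;
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur "
			    "when simplifying this expression"),
			   WARN_STRICT_OVERFLOW_MISC);
  return true;
}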
15748 /* Return true when (CODE OP0) is known to be nonzero.  Only integral
15749 and pointer types are handled; for floating point we would further
15750 have to ensure that the value is not a denormal.  Similar logic is
15751 present in nonzero_address in rtlanal.c.
15752 If the return value is based on the assumption that signed overflow
15753 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15754 change *STRICT_OVERFLOW_P. */
15756 bool
15757 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15758 bool *strict_overflow_p)
15760 switch (code)
15762 case ABS_EXPR:
15763 return tree_expr_nonzero_warnv_p (op0,
15764 strict_overflow_p);
15766 case NOP_EXPR:
15768 tree inner_type = TREE_TYPE (op0);
15769 tree outer_type = type;
15771 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15772 && tree_expr_nonzero_warnv_p (op0,
15773 strict_overflow_p));
15775 break;
15777 case NON_LVALUE_EXPR:
15778 return tree_expr_nonzero_warnv_p (op0,
15779 strict_overflow_p);
15781 default:
15782 break;
15785 return false;
15788 /* Return true when (CODE OP0 OP1) is known to be nonzero.  Only
15789 integral and pointer types are handled; for floating point we would
15790 further have to ensure that the value is not a denormal.  Similar
15791 logic is present in nonzero_address in rtlanal.c.
15792 If the return value is based on the assumption that signed overflow
15793 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15794 change *STRICT_OVERFLOW_P. */
15796 bool
15797 tree_binary_nonzero_warnv_p (enum tree_code code,
15798 tree type,
15799 tree op0,
15800 tree op1, bool *strict_overflow_p)
15802 bool sub_strict_overflow_p;
15803 switch (code)
15805 case POINTER_PLUS_EXPR:
15806 case PLUS_EXPR:
15807 if (TYPE_OVERFLOW_UNDEFINED (type))
15809 /* In the presence of negative values it is hard
15810 to say anything definite. */
15811 sub_strict_overflow_p = false;
15812 if (!tree_expr_nonnegative_warnv_p (op0,
15813 &sub_strict_overflow_p)
15814 || !tree_expr_nonnegative_warnv_p (op1,
15815 &sub_strict_overflow_p))
15816 return false;
15817 /* One of the operands must be positive and the other non-negative. */
15818 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15819 overflows, on a twos-complement machine the sum of two
15820 nonnegative numbers can never be zero. */
15821 return (tree_expr_nonzero_warnv_p (op0,
15822 strict_overflow_p)
15823 || tree_expr_nonzero_warnv_p (op1,
15824 strict_overflow_p));
15826 break;
15828 case MULT_EXPR:
15829 if (TYPE_OVERFLOW_UNDEFINED (type))
15831 if (tree_expr_nonzero_warnv_p (op0,
15832 strict_overflow_p)
15833 && tree_expr_nonzero_warnv_p (op1,
15834 strict_overflow_p))
15836 *strict_overflow_p = true;
15837 return true;
15840 break;
15842 case MIN_EXPR:
15843 sub_strict_overflow_p = false;
15844 if (tree_expr_nonzero_warnv_p (op0,
15845 &sub_strict_overflow_p)
15846 && tree_expr_nonzero_warnv_p (op1,
15847 &sub_strict_overflow_p))
15849 if (sub_strict_overflow_p)
15850 *strict_overflow_p = true;
15852 break;
15854 case MAX_EXPR:
15855 sub_strict_overflow_p = false;
15856 if (tree_expr_nonzero_warnv_p (op0,
15857 &sub_strict_overflow_p))
15859 if (sub_strict_overflow_p)
15860 *strict_overflow_p = true;
15862 /* When both operands are nonzero, MAX must be nonzero too. */
15863 if (tree_expr_nonzero_warnv_p (op1,
15864 strict_overflow_p))
15865 return true;
15867 /* MAX where operand 0 is positive is positive. */
15868 return tree_expr_nonnegative_warnv_p (op0,
15869 strict_overflow_p);
15871 /* MAX where operand 1 is positive is positive. */
15872 else if (tree_expr_nonzero_warnv_p (op1,
15873 &sub_strict_overflow_p)
15874 && tree_expr_nonnegative_warnv_p (op1,
15875 &sub_strict_overflow_p))
15877 if (sub_strict_overflow_p)
15878 *strict_overflow_p = true;
15879 return true;
15881 break;
15883 case BIT_IOR_EXPR:
15884 return (tree_expr_nonzero_warnv_p (op1,
15885 strict_overflow_p)
15886 || tree_expr_nonzero_warnv_p (op0,
15887 strict_overflow_p));
15889 default:
15890 break;
15893 return false;
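/* A standalone illustration of the PLUS_EXPR reasoning above: if A and
   B are non-negative 32-bit ints and at least one is nonzero, then
   0 < A + B <= 2 * INT_MAX < 2**32, so even the wrapped two's-complement
   sum cannot be zero.  Hypothetical checker, written with unsigned
   arithmetic to make the wraparound explicit.  */

static int ATTRIBUTE_UNUSED
example_sum_of_nonnegatives_is_nonzero (int a, int b)
{
  /* Precondition: a >= 0, b >= 0, and at least one of them nonzero.  */
  unsigned int wrapped = (unsigned int) a + (unsigned int) b;
  return wrapped != 0;	/* Always 1 under the precondition.  */
}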
15896 /* Return true when T is known to be nonzero.  For an address this
15897 means the pointed-to object cannot live at address zero; weak
15898 declarations, which may resolve to NULL, are handled conservatively.
15899 Similar logic is present in nonzero_address in rtlanal.c.
15900 If the return value is based on the assumption that signed overflow
15901 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15902 change *STRICT_OVERFLOW_P. */
15904 bool
15905 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15907 bool sub_strict_overflow_p;
15908 switch (TREE_CODE (t))
15910 case INTEGER_CST:
15911 return !integer_zerop (t);
15913 case ADDR_EXPR:
15915 tree base = TREE_OPERAND (t, 0);
15916 if (!DECL_P (base))
15917 base = get_base_address (base);
15919 if (!base)
15920 return false;
15922 /* Weak declarations may link to NULL.  Other things may also be NULL,
15923 so protect with -fdelete-null-pointer-checks; variables allocated on
15924 the stack, however, can never be NULL. */
15925 if (DECL_P (base)
15926 && (flag_delete_null_pointer_checks
15927 || (DECL_CONTEXT (base)
15928 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15929 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15930 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15932 /* Constants are never weak. */
15933 if (CONSTANT_CLASS_P (base))
15934 return true;
15936 return false;
15939 case COND_EXPR:
15940 sub_strict_overflow_p = false;
15941 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15942 &sub_strict_overflow_p)
15943 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15944 &sub_strict_overflow_p))
15946 if (sub_strict_overflow_p)
15947 *strict_overflow_p = true;
15948 return true;
15950 break;
15952 default:
15953 break;
15955 return false;
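/* Illustration of the weak-declaration case above, in GNU C terms: the
   address of a weak external may compare equal to NULL when no
   definition is linked in, which is why DECL_WEAK bases are never
   treated as nonzero.  The symbol name is hypothetical.  */

extern int example_weak_symbol __attribute__ ((weak));

static int ATTRIBUTE_UNUSED
example_weak_address_may_be_null (void)
{
  /* Evaluates to 0 at run time if no definition of the weak symbol
     was linked in.  */
  return &example_weak_symbol != 0;
}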
15958 /* Return true when T is known to be nonzero.  Only integral and
15959 pointer types are handled; doing something useful for floating
15960 point would need more work (e.g. ruling out denormals).  Similar
15961 logic is present in nonzero_address in rtlanal.c.
15962 If the return value is based on the assumption that signed overflow
15963 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15964 change *STRICT_OVERFLOW_P. */
15966 bool
15967 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15969 tree type = TREE_TYPE (t);
15970 enum tree_code code;
15972 /* Doing something useful for floating point would need more work. */
15973 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15974 return false;
15976 code = TREE_CODE (t);
15977 switch (TREE_CODE_CLASS (code))
15979 case tcc_unary:
15980 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15981 strict_overflow_p);
15982 case tcc_binary:
15983 case tcc_comparison:
15984 return tree_binary_nonzero_warnv_p (code, type,
15985 TREE_OPERAND (t, 0),
15986 TREE_OPERAND (t, 1),
15987 strict_overflow_p);
15988 case tcc_constant:
15989 case tcc_declaration:
15990 case tcc_reference:
15991 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15993 default:
15994 break;
15997 switch (code)
15999 case TRUTH_NOT_EXPR:
16000 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16001 strict_overflow_p);
16003 case TRUTH_AND_EXPR:
16004 case TRUTH_OR_EXPR:
16005 case TRUTH_XOR_EXPR:
16006 return tree_binary_nonzero_warnv_p (code, type,
16007 TREE_OPERAND (t, 0),
16008 TREE_OPERAND (t, 1),
16009 strict_overflow_p);
16011 case COND_EXPR:
16012 case CONSTRUCTOR:
16013 case OBJ_TYPE_REF:
16014 case ASSERT_EXPR:
16015 case ADDR_EXPR:
16016 case WITH_SIZE_EXPR:
16017 case SSA_NAME:
16018 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16020 case COMPOUND_EXPR:
16021 case MODIFY_EXPR:
16022 case BIND_EXPR:
16023 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16024 strict_overflow_p);
16026 case SAVE_EXPR:
16027 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
16028 strict_overflow_p);
16030 case CALL_EXPR:
16031 return alloca_call_p (t);
16033 default:
16034 break;
16036 return false;
16039 /* Return true when T is known to be nonzero.  Handle warnings about
16040 undefined signed overflow. */
16042 bool
16043 tree_expr_nonzero_p (tree t)
16045 bool ret, strict_overflow_p;
16047 strict_overflow_p = false;
16048 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
16049 if (strict_overflow_p)
16050 fold_overflow_warning (("assuming signed overflow does not occur when "
16051 "determining that expression is always "
16052 "non-zero"),
16053 WARN_STRICT_OVERFLOW_MISC);
16054 return ret;
16057 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16058 attempt to fold the expression to a constant without modifying TYPE,
16059 OP0 or OP1.
16061 If the expression could be simplified to a constant, then return
16062 the constant. If the expression would not be simplified to a
16063 constant, then return NULL_TREE. */
16065 tree
16066 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16068 tree tem = fold_binary (code, type, op0, op1);
16069 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16072 /* Given the components of a unary expression CODE, TYPE and OP0,
16073 attempt to fold the expression to a constant without modifying
16074 TYPE or OP0.
16076 If the expression could be simplified to a constant, then return
16077 the constant. If the expression would not be simplified to a
16078 constant, then return NULL_TREE. */
16080 tree
16081 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16083 tree tem = fold_unary (code, type, op0);
16084 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
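/* A minimal usage sketch for the two helpers above, assuming only the
   standard tree-building entry points: fold 2 + 3 in `int' down to the
   INTEGER_CST 5.  The function name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_fold_two_plus_three (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);

  /* Yields the INTEGER_CST 5; would yield NULL_TREE if the operation
     did not simplify to a constant.  */
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				  two, three);
}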
16087 /* If EXP represents referencing an element in a constant string
16088 (either via pointer arithmetic or array indexing), return the
16089 tree representing the value accessed, otherwise return NULL. */
16091 tree
16092 fold_read_from_constant_string (tree exp)
16094 if ((TREE_CODE (exp) == INDIRECT_REF
16095 || TREE_CODE (exp) == ARRAY_REF)
16096 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16098 tree exp1 = TREE_OPERAND (exp, 0);
16099 tree index;
16100 tree string;
16101 location_t loc = EXPR_LOCATION (exp);
16103 if (TREE_CODE (exp) == INDIRECT_REF)
16104 string = string_constant (exp1, &index);
16105 else
16107 tree low_bound = array_ref_low_bound (exp);
16108 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16110 /* Optimize the special case of a zero lower bound.
16112 We convert the low_bound to sizetype to avoid some problems
16113 with constant folding.  (E.g. suppose the lower bound is 1,
16114 and its mode is QI.  Without the conversion,
16115 (ARRAY+(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16116 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!) */
16117 if (! integer_zerop (low_bound))
16118 index = size_diffop_loc (loc, index,
16119 fold_convert_loc (loc, sizetype, low_bound));
16121 string = exp1;
16124 if (string
16125 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16126 && TREE_CODE (string) == STRING_CST
16127 && TREE_CODE (index) == INTEGER_CST
16128 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16129 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16130 == MODE_INT)
16131 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16132 return build_int_cst_type (TREE_TYPE (exp),
16133 (TREE_STRING_POINTER (string)
16134 [TREE_INT_CST_LOW (index)]));
16136 return NULL;
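/* A hypothetical caller sketch for the function above: for a tree
   representing "abc"[1] (an ARRAY_REF into a STRING_CST), it returns
   the INTEGER_CST 'b'; a NULL result means the access could not be
   resolved to a constant character.  */

static tree ATTRIBUTE_UNUSED
example_constant_string_element (tree ref)
{
  tree c = fold_read_from_constant_string (ref);
  return c ? c : ref;
}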
16139 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16140 an integer constant, real, or fixed-point constant.
16142 TYPE is the type of the result. */
16144 static tree
16145 fold_negate_const (tree arg0, tree type)
16147 tree t = NULL_TREE;
16149 switch (TREE_CODE (arg0))
16151 case INTEGER_CST:
16153 double_int val = tree_to_double_int (arg0);
16154 bool overflow;
16155 val = val.neg_with_overflow (&overflow);
16156 t = force_fit_type_double (type, val, 1,
16157 (overflow | TREE_OVERFLOW (arg0))
16158 && !TYPE_UNSIGNED (type));
16159 break;
16162 case REAL_CST:
16163 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16164 break;
16166 case FIXED_CST:
16168 FIXED_VALUE_TYPE f;
16169 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16170 &(TREE_FIXED_CST (arg0)), NULL,
16171 TYPE_SATURATING (type));
16172 t = build_fixed (type, f);
16173 /* Propagate overflow flags. */
16174 if (overflow_p | TREE_OVERFLOW (arg0))
16175 TREE_OVERFLOW (t) = 1;
16176 break;
16179 default:
16180 gcc_unreachable ();
16183 return t;
16186 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16187 an integer constant or real constant.
16189 TYPE is the type of the result. */
16191 tree
16192 fold_abs_const (tree arg0, tree type)
16194 tree t = NULL_TREE;
16196 switch (TREE_CODE (arg0))
16198 case INTEGER_CST:
16200 double_int val = tree_to_double_int (arg0);
16202 /* If the value is unsigned or non-negative, then the absolute value
16203 is the same as the ordinary value. */
16204 if (TYPE_UNSIGNED (type)
16205 || !val.is_negative ())
16206 t = arg0;
16208 /* If the value is negative, then the absolute value is
16209 its negation. */
16210 else
16212 bool overflow;
16213 val = val.neg_with_overflow (&overflow);
16214 t = force_fit_type_double (type, val, -1,
16215 overflow | TREE_OVERFLOW (arg0));
16218 break;
16220 case REAL_CST:
16221 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16222 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16223 else
16224 t = arg0;
16225 break;
16227 default:
16228 gcc_unreachable ();
16231 return t;
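/* Standalone illustration of the overflow case the two functions above
   must flag: in 32-bit two's complement both -INT_MIN and abs (INT_MIN)
   wrap back to INT_MIN, so fold_negate_const and fold_abs_const
   propagate an overflow flag rather than silently using the wrapped
   value.  Written with unsigned arithmetic to keep the demonstration
   well defined.  */

static int ATTRIBUTE_UNUSED
example_abs_wraps_at_int_min (void)
{
  int x = -2147483647 - 1;			/* INT_MIN for 32-bit int.  */
  unsigned int negated = ~(unsigned int) x + 1;	/* Two's-complement negate.  */
  return (int) negated == x;			/* 1: abs wrapped to INT_MIN.  */
}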
16234 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16235 constant. TYPE is the type of the result. */
16237 static tree
16238 fold_not_const (const_tree arg0, tree type)
16240 double_int val;
16242 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16244 val = ~tree_to_double_int (arg0);
16245 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16248 /* Given CODE, a relational operator, the target type, TYPE and two
16249 constant operands OP0 and OP1, return the result of the
16250 relational operation. If the result is not a compile time
16251 constant, then return NULL_TREE. */
16253 static tree
16254 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16256 int result, invert;
16258 /* From here on, the only cases we handle are when the result is
16259 known to be a constant. */
16261 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16263 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16264 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16266 /* Handle the cases where either operand is a NaN. */
16267 if (real_isnan (c0) || real_isnan (c1))
16269 switch (code)
16271 case EQ_EXPR:
16272 case ORDERED_EXPR:
16273 result = 0;
16274 break;
16276 case NE_EXPR:
16277 case UNORDERED_EXPR:
16278 case UNLT_EXPR:
16279 case UNLE_EXPR:
16280 case UNGT_EXPR:
16281 case UNGE_EXPR:
16282 case UNEQ_EXPR:
16283 result = 1;
16284 break;
16286 case LT_EXPR:
16287 case LE_EXPR:
16288 case GT_EXPR:
16289 case GE_EXPR:
16290 case LTGT_EXPR:
16291 if (flag_trapping_math)
16292 return NULL_TREE;
16293 result = 0;
16294 break;
16296 default:
16297 gcc_unreachable ();
16300 return constant_boolean_node (result, type);
16303 return constant_boolean_node (real_compare (code, c0, c1), type);
16306 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16308 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16309 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16310 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16313 /* Handle equality/inequality of complex constants. */
16314 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16316 tree rcond = fold_relational_const (code, type,
16317 TREE_REALPART (op0),
16318 TREE_REALPART (op1));
16319 tree icond = fold_relational_const (code, type,
16320 TREE_IMAGPART (op0),
16321 TREE_IMAGPART (op1));
16322 if (code == EQ_EXPR)
16323 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16324 else if (code == NE_EXPR)
16325 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16326 else
16327 return NULL_TREE;
16330 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16332 unsigned count = VECTOR_CST_NELTS (op0);
16333 tree *elts = XALLOCAVEC (tree, count);
16334 gcc_assert (VECTOR_CST_NELTS (op1) == count
16335 && TYPE_VECTOR_SUBPARTS (type) == count);
16337 for (unsigned i = 0; i < count; i++)
16339 tree elem_type = TREE_TYPE (type);
16340 tree elem0 = VECTOR_CST_ELT (op0, i);
16341 tree elem1 = VECTOR_CST_ELT (op1, i);
16343 tree tem = fold_relational_const (code, elem_type,
16344 elem0, elem1);
16346 if (tem == NULL_TREE)
16347 return NULL_TREE;
16349 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16352 return build_vector (type, elts);
16355 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16357 To compute GT, swap the arguments and do LT.
16358 To compute GE, do LT and invert the result.
16359 To compute LE, swap the arguments, do LT and invert the result.
16360 To compute NE, do EQ and invert the result.
16362 Therefore, the code below must handle only EQ and LT. */
16364 if (code == LE_EXPR || code == GT_EXPR)
16366 tree tem = op0;
16367 op0 = op1;
16368 op1 = tem;
16369 code = swap_tree_comparison (code);
16372 /* Note that it is safe to invert for real values here because we
16373 have already handled the one case where it matters. */
16375 invert = 0;
16376 if (code == NE_EXPR || code == GE_EXPR)
16378 invert = 1;
16379 code = invert_tree_comparison (code, false);
16382 /* Compute a result for LT or EQ if args permit;
16383 otherwise return NULL_TREE. */
16384 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16386 if (code == EQ_EXPR)
16387 result = tree_int_cst_equal (op0, op1);
16388 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16389 result = INT_CST_LT_UNSIGNED (op0, op1);
16390 else
16391 result = INT_CST_LT (op0, op1);
16393 else
16394 return NULL_TREE;
16396 if (invert)
16397 result ^= 1;
16398 return constant_boolean_node (result, type);
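/* A standalone sketch of the canonicalization above: every integer
   comparison is reduced to LT or EQ by swapping operands (GT, LE) and
   inverting the answer (NE, GE, and LE after its swap).  Hypothetical
   helper over host `long' values.  */

static int ATTRIBUTE_UNUSED
example_compare_via_lt_eq (enum tree_code code, long op0, long op1)
{
  int invert = 0, result;

  /* GT and LE become LT and GE by swapping the operands.  */
  if (code == LE_EXPR || code == GT_EXPR)
    {
      long tem = op0;
      op0 = op1, op1 = tem;
      code = (code == LE_EXPR ? GE_EXPR : LT_EXPR);
    }

  /* NE and GE become EQ and LT by inverting the result.  */
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = (code == NE_EXPR ? EQ_EXPR : LT_EXPR);
    }

  result = (code == EQ_EXPR ? op0 == op1 : op0 < op1);
  return invert ? !result : result;
}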
16401 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16402 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16403 itself. */
16405 tree
16406 fold_build_cleanup_point_expr (tree type, tree expr)
16408 /* If the expression does not have side effects then we don't have to wrap
16409 it with a cleanup point expression. */
16410 if (!TREE_SIDE_EFFECTS (expr))
16411 return expr;
16413 /* If the expression is a return, check whether its operand -- or, when
16414 that operand is a modify expression, the right-hand side of the
16415 modify -- has side effects.  If it has none, we don't need to wrap
16416 the expression in a cleanup point expression.  Note we don't check
16417 the left hand side of the modify because it should always be a return decl. */
16418 if (TREE_CODE (expr) == RETURN_EXPR)
16420 tree op = TREE_OPERAND (expr, 0);
16421 if (!op || !TREE_SIDE_EFFECTS (op))
16422 return expr;
16423 op = TREE_OPERAND (op, 1);
16424 if (!TREE_SIDE_EFFECTS (op))
16425 return expr;
16428 return build1 (CLEANUP_POINT_EXPR, type, expr);
16431 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16432 of an indirection through OP0, or NULL_TREE if no simplification is
16433 possible. */
16435 tree
16436 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16438 tree sub = op0;
16439 tree subtype;
16441 STRIP_NOPS (sub);
16442 subtype = TREE_TYPE (sub);
16443 if (!POINTER_TYPE_P (subtype))
16444 return NULL_TREE;
16446 if (TREE_CODE (sub) == ADDR_EXPR)
16448 tree op = TREE_OPERAND (sub, 0);
16449 tree optype = TREE_TYPE (op);
16450 /* *&CONST_DECL -> the value of the const decl. */
16451 if (TREE_CODE (op) == CONST_DECL)
16452 return DECL_INITIAL (op);
16453 /* *&p => p; make sure to handle *&"str"[cst] here. */
16454 if (type == optype)
16456 tree fop = fold_read_from_constant_string (op);
16457 if (fop)
16458 return fop;
16459 else
16460 return op;
16462 /* *(foo *)&fooarray => fooarray[0] */
16463 else if (TREE_CODE (optype) == ARRAY_TYPE
16464 && type == TREE_TYPE (optype)
16465 && (!in_gimple_form
16466 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16468 tree type_domain = TYPE_DOMAIN (optype);
16469 tree min_val = size_zero_node;
16470 if (type_domain && TYPE_MIN_VALUE (type_domain))
16471 min_val = TYPE_MIN_VALUE (type_domain);
16472 if (in_gimple_form
16473 && TREE_CODE (min_val) != INTEGER_CST)
16474 return NULL_TREE;
16475 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16476 NULL_TREE, NULL_TREE);
16478 /* *(foo *)&complexfoo => __real__ complexfoo */
16479 else if (TREE_CODE (optype) == COMPLEX_TYPE
16480 && type == TREE_TYPE (optype))
16481 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16482 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16483 else if (TREE_CODE (optype) == VECTOR_TYPE
16484 && type == TREE_TYPE (optype))
16486 tree part_width = TYPE_SIZE (type);
16487 tree index = bitsize_int (0);
16488 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16492 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16493 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16495 tree op00 = TREE_OPERAND (sub, 0);
16496 tree op01 = TREE_OPERAND (sub, 1);
16498 STRIP_NOPS (op00);
16499 if (TREE_CODE (op00) == ADDR_EXPR)
16501 tree op00type;
16502 op00 = TREE_OPERAND (op00, 0);
16503 op00type = TREE_TYPE (op00);
16505 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16506 if (TREE_CODE (op00type) == VECTOR_TYPE
16507 && type == TREE_TYPE (op00type))
16509 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16510 tree part_width = TYPE_SIZE (type);
16511 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16512 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16513 tree index = bitsize_int (indexi);
16515 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16516 return fold_build3_loc (loc,
16517 BIT_FIELD_REF, type, op00,
16518 part_width, index);
16521 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16522 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16523 && type == TREE_TYPE (op00type))
16525 tree size = TYPE_SIZE_UNIT (type);
16526 if (tree_int_cst_equal (size, op01))
16527 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16529 /* ((foo *)&fooarray)[1] => fooarray[1] */
16530 else if (TREE_CODE (op00type) == ARRAY_TYPE
16531 && type == TREE_TYPE (op00type))
16533 tree type_domain = TYPE_DOMAIN (op00type);
16534 tree min_val = size_zero_node;
16535 if (type_domain && TYPE_MIN_VALUE (type_domain))
16536 min_val = TYPE_MIN_VALUE (type_domain);
16537 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16538 TYPE_SIZE_UNIT (type));
16539 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16540 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16541 NULL_TREE, NULL_TREE);
16546 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16547 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16548 && type == TREE_TYPE (TREE_TYPE (subtype))
16549 && (!in_gimple_form
16550 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16552 tree type_domain;
16553 tree min_val = size_zero_node;
16554 sub = build_fold_indirect_ref_loc (loc, sub);
16555 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16556 if (type_domain && TYPE_MIN_VALUE (type_domain))
16557 min_val = TYPE_MIN_VALUE (type_domain);
16558 if (in_gimple_form
16559 && TREE_CODE (min_val) != INTEGER_CST)
16560 return NULL_TREE;
16561 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16562 NULL_TREE);
16565 return NULL_TREE;
16568 /* Builds an expression for an indirection through T, simplifying some
16569 cases. */
16571 tree
16572 build_fold_indirect_ref_loc (location_t loc, tree t)
16574 tree type = TREE_TYPE (TREE_TYPE (t));
16575 tree sub = fold_indirect_ref_1 (loc, type, t);
16577 if (sub)
16578 return sub;
16580 return build1_loc (loc, INDIRECT_REF, type, t);
16583 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16585 tree
16586 fold_indirect_ref_loc (location_t loc, tree t)
16588 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16590 if (sub)
16591 return sub;
16592 else
16593 return t;
16596 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16597 whose result is ignored. The type of the returned tree need not be
16598 the same as the original expression. */
16600 tree
16601 fold_ignored_result (tree t)
16603 if (!TREE_SIDE_EFFECTS (t))
16604 return integer_zero_node;
16606 for (;;)
16607 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16609 case tcc_unary:
16610 t = TREE_OPERAND (t, 0);
16611 break;
16613 case tcc_binary:
16614 case tcc_comparison:
16615 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16616 t = TREE_OPERAND (t, 0);
16617 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16618 t = TREE_OPERAND (t, 1);
16619 else
16620 return t;
16621 break;
16623 case tcc_expression:
16624 switch (TREE_CODE (t))
16626 case COMPOUND_EXPR:
16627 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16628 return t;
16629 t = TREE_OPERAND (t, 0);
16630 break;
16632 case COND_EXPR:
16633 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16634 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16635 return t;
16636 t = TREE_OPERAND (t, 0);
16637 break;
16639 default:
16640 return t;
16642 break;
16644 default:
16645 return t;
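/* A hypothetical caller sketch for the walk above: for an ignored
   expression such as `x + f ()', the pure addition is stripped and only
   the side-effecting call to f remains; an expression with no side
   effects at all collapses to integer_zero_node.  */

static tree ATTRIBUTE_UNUSED
example_discard_ignored_value (tree expr)
{
  return fold_ignored_result (expr);
}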
16649 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16650 This can only be applied to objects of a sizetype. */
16652 tree
16653 round_up_loc (location_t loc, tree value, int divisor)
16655 tree div = NULL_TREE;
16657 gcc_assert (divisor > 0);
16658 if (divisor == 1)
16659 return value;
16661 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16662 have to do anything. Only do this when we are not given a const,
16663 because in that case, this check is more expensive than just
16664 doing the rounding. */
16665 if (TREE_CODE (value) != INTEGER_CST)
16667 div = build_int_cst (TREE_TYPE (value), divisor);
16669 if (multiple_of_p (TREE_TYPE (value), value, div))
16670 return value;
16673 /* If divisor is a power of two, simplify this to bit manipulation. */
16674 if (divisor == (divisor & -divisor))
16676 if (TREE_CODE (value) == INTEGER_CST)
16678 double_int val = tree_to_double_int (value);
16679 bool overflow_p;
16681 if ((val.low & (divisor - 1)) == 0)
16682 return value;
16684 overflow_p = TREE_OVERFLOW (value);
16685 val.low &= ~(divisor - 1);
16686 val.low += divisor;
16687 if (val.low == 0)
16689 val.high++;
16690 if (val.high == 0)
16691 overflow_p = true;
16694 return force_fit_type_double (TREE_TYPE (value), val,
16695 -1, overflow_p);
16697 else
16699 tree t;
16701 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16702 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16703 t = build_int_cst (TREE_TYPE (value), -divisor);
16704 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16707 else
16709 if (!div)
16710 div = build_int_cst (TREE_TYPE (value), divisor);
16711 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16712 value = size_binop_loc (loc, MULT_EXPR, value, div);
16715 return value;
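/* Standalone illustration of the power-of-two fast path above: when
   DIVISOR == 2**k, its two's-complement negation -DIVISOR is a mask
   clearing the low k bits, so rounding up is (VALUE + DIVISOR - 1)
   & -DIVISOR, and rounding down (used by round_down_loc below) is
   VALUE & -DIVISOR.  E.g. (13 + 7) & -8 == 16 and 13 & -8 == 8.
   Hypothetical helper; precondition: DIVISOR is a power of two.  */

static unsigned long ATTRIBUTE_UNUSED
example_round_up_pow2 (unsigned long value, unsigned long divisor)
{
  return (value + divisor - 1) & -divisor;
}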
16718 /* Likewise, but round down. */
16720 tree
16721 round_down_loc (location_t loc, tree value, int divisor)
16723 tree div = NULL_TREE;
16725 gcc_assert (divisor > 0);
16726 if (divisor == 1)
16727 return value;
16729 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16730 have to do anything. Only do this when we are not given a const,
16731 because in that case, this check is more expensive than just
16732 doing the rounding. */
16733 if (TREE_CODE (value) != INTEGER_CST)
16735 div = build_int_cst (TREE_TYPE (value), divisor);
16737 if (multiple_of_p (TREE_TYPE (value), value, div))
16738 return value;
16741 /* If divisor is a power of two, simplify this to bit manipulation. */
16742 if (divisor == (divisor & -divisor))
16744 tree t;
16746 t = build_int_cst (TREE_TYPE (value), -divisor);
16747 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16749 else
16751 if (!div)
16752 div = build_int_cst (TREE_TYPE (value), divisor);
16753 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16754 value = size_binop_loc (loc, MULT_EXPR, value, div);
16757 return value;
16760 /* Returns a pointer to the base of the object addressed by EXP and
16761 extracts the offset of the access, storing the constant bit offset in
16762 *PBITPOS and any variable byte offset in *POFFSET. */
16764 static tree
16765 split_address_to_core_and_offset (tree exp,
16766 HOST_WIDE_INT *pbitpos, tree *poffset)
16768 tree core;
16769 enum machine_mode mode;
16770 int unsignedp, volatilep;
16771 HOST_WIDE_INT bitsize;
16772 location_t loc = EXPR_LOCATION (exp);
16774 if (TREE_CODE (exp) == ADDR_EXPR)
16776 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16777 poffset, &mode, &unsignedp, &volatilep,
16778 false);
16779 core = build_fold_addr_expr_loc (loc, core);
16781 else
16783 core = exp;
16784 *pbitpos = 0;
16785 *poffset = NULL_TREE;
16788 return core;
16791 /* Returns true if addresses of E1 and E2 differ by a constant, false
16792 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16794 bool
16795 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16797 tree core1, core2;
16798 HOST_WIDE_INT bitpos1, bitpos2;
16799 tree toffset1, toffset2, tdiff, type;
16801 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16802 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16804 if (bitpos1 % BITS_PER_UNIT != 0
16805 || bitpos2 % BITS_PER_UNIT != 0
16806 || !operand_equal_p (core1, core2, 0))
16807 return false;
16809 if (toffset1 && toffset2)
16811 type = TREE_TYPE (toffset1);
16812 if (type != TREE_TYPE (toffset2))
16813 toffset2 = fold_convert (type, toffset2);
16815 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16816 if (!cst_and_fits_in_hwi (tdiff))
16817 return false;
16819 *diff = int_cst_value (tdiff);
16821 else if (toffset1 || toffset2)
16823 /* If only one of the offsets is non-constant, the difference cannot
16824 be a constant. */
16825 return false;
16827 else
16828 *diff = 0;
16830 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16831 return true;
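/* A hypothetical caller sketch for the predicate above: for E1 == &a[3]
   and E2 == &a[1] with 4-byte elements, both addresses split to the
   core `a' with byte offsets 12 and 4, so 8 is returned; for &a[i]
   vs. &a[1] the difference is not constant.  The -1 sentinel is only
   for illustration; a real caller would keep the boolean result and
   the out-parameter separate.  */

static HOST_WIDE_INT ATTRIBUTE_UNUSED
example_constant_pointer_difference (tree e1, tree e2)
{
  HOST_WIDE_INT diff;

  if (!ptr_difference_const (e1, e2, &diff))
    return -1;		/* Sentinel: the difference is not constant.  */
  return diff;
}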
16834 /* Simplify the floating point expression EXP when the sign of the
16835 result is not significant. Return NULL_TREE if no simplification
16836 is possible. */
16838 tree
16839 fold_strip_sign_ops (tree exp)
16841 tree arg0, arg1;
16842 location_t loc = EXPR_LOCATION (exp);
16844 switch (TREE_CODE (exp))
16846 case ABS_EXPR:
16847 case NEGATE_EXPR:
16848 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16849 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16851 case MULT_EXPR:
16852 case RDIV_EXPR:
16853 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16854 return NULL_TREE;
16855 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16856 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16857 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16858 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16859 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16860 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16861 break;
16863 case COMPOUND_EXPR:
16864 arg0 = TREE_OPERAND (exp, 0);
16865 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16866 if (arg1)
16867 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16868 break;
16870 case COND_EXPR:
16871 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16872 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16873 if (arg0 || arg1)
16874 return fold_build3_loc (loc,
16875 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16876 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16877 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16878 break;
16880 case CALL_EXPR:
16882 const enum built_in_function fcode = builtin_mathfn_code (exp);
16883 switch (fcode)
16885 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16886 /* Strip copysign function call, return the 1st argument. */
16887 arg0 = CALL_EXPR_ARG (exp, 0);
16888 arg1 = CALL_EXPR_ARG (exp, 1);
16889 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16891 default:
16892 /* Strip sign ops from the argument of "odd" math functions. */
16893 if (negate_mathfn_p (fcode))
16895 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16896 if (arg0)
16897 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16899 break;
16902 break;
16904 default:
16905 break;
16907 return NULL_TREE;
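/* A hypothetical caller sketch: inside fabs (ARG) the sign of ARG is
   irrelevant, so a builtin folder can substitute the stripped form,
   e.g. fabs (-x * y) becomes fabs (x * y) and fabs (copysign (x, s))
   becomes fabs (x).  */

static tree ATTRIBUTE_UNUSED
example_simplify_fabs_argument (tree arg)
{
  tree stripped = fold_strip_sign_ops (arg);
  return stripped ? stripped : arg;
}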