/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
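
/* As a quick illustration of the entry points: size_binop (PLUS_EXPR,
   size_int (4), size_int (8)) folds at compile time to the sizetype
   constant 12, and fold applied to the GENERIC tree for 2 + 3 yields
   the INTEGER_CST 5.  (Example only, not an additional entry point.)  */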
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "tree-ssa-operands.h"
54 #include "optabs-query.h"
55 #include "cgraph.h"
56 #include "diagnostic-core.h"
57 #include "flags.h"
58 #include "alias.h"
59 #include "fold-const.h"
60 #include "fold-const-call.h"
61 #include "stor-layout.h"
62 #include "calls.h"
63 #include "tree-iterator.h"
64 #include "expr.h"
65 #include "intl.h"
66 #include "langhooks.h"
67 #include "tree-eh.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "builtins.h"
71 #include "generic-match.h"
72 #include "gimple-fold.h"
73 #include "params.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"

#ifndef LOAD_EXTEND_OP
#define LOAD_EXTEND_OP(M) UNKNOWN
#endif

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
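
/* The encoding assigns one bit each to LT, EQ, GT and UNORD, so the
   bitwise OR or AND of two codes is again a meaningful code: e.g.
   COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) and
   COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD).  */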

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    machine_mode *, int *, int *, int *,
                                    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modifies X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
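
/* For example, given INTEGER_CSTs 12 and 4 this returns the constant 3;
   given 13 and 4 it returns NULL_TREE since the remainder is nonzero.  */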

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
218 /* Start deferring overflow warnings. We could use a stack here to
219 permit nested calls, but at present it is not necessary. */
221 void
222 fold_defer_overflow_warnings (void)
224 ++fold_deferring_overflow_warnings;

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
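
/* A typical caller brackets a fold with the defer/undefer pair, e.g.

     fold_defer_overflow_warnings ();
     tree res = fold_binary (PLUS_EXPR, type, op0, op1);
     fold_undefer_overflow_warnings (res != NULL_TREE, stmt, 0);

   so that a deferred -Wstrict-overflow warning is only emitted if the
   folded result is actually used.  (Illustrative sketch only; STMT and
   the operands stand for caller-specific state.)  */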

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
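
/* For a 32-bit signed int this rejects exactly INT_MIN, the value with
   only the sign bit set, since -INT_MIN is not representable; every
   other signed value, and no unsigned value, may be negated safely.  */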

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type))
          || (INTEGRAL_TYPE_P (type)
              && ! TYPE_OVERFLOW_WRAPS (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && (! INTEGRAL_TYPE_P (type)
                 || TYPE_OVERFLOW_WRAPS (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      /* INT_MIN/n * n doesn't overflow, while negating one of its
         operands does if n is a power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
          && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                 && ! integer_pow2p (TREE_OPERAND (t, 0)))
                || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
                    && ! integer_pow2p (TREE_OPERAND (t, 1)))))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
          || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
          || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
              && ! integer_onep (TREE_OPERAND (t, 1))))
        return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (ANY_INTEGRAL_TYPE_P (type)
              && !TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
        return fold_build2_loc (loc, TREE_CODE (t), type,
                                negate_expr (TREE_OPERAND (t, 0)),
                                TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
           || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
           || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
               && ! integer_onep (TREE_OPERAND (t, 1))))
          && negate_expr_p (TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, TREE_CODE (t), type,
                                TREE_OPERAND (t, 0),
                                negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
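
/* For example, splitting the PLUS_EXPR for X + 5 with CODE == PLUS_EXPR
   stores 5 in *LITP, leaves *CONP null and returns X; splitting X - 5
   routes the literal to *MINUS_LITP instead.  */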

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
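
/* For instance, int_const_binop (PLUS_EXPR, ...) on the INTEGER_CSTs
   2 and 3 produces the constant 5, while any of the division or modulo
   codes with a zero second operand produces NULL_TREE.  */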

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && real_equal (&d2, &dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
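
/* As a worked instance of the straightforward complex-division
   algorithm above: (3 + 2i) / (1 + 1i) has t = 1*1 + 1*1 = 2, giving
   tr = (3*1 + 2*1) / 2 = 2.5 and ti = (2*1 - 3*1) / 2 = -0.5, i.e.
   the quotient 2.5 - 0.5i.  */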

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
          return NULL_TREE;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                          ? NOP_EXPR : FIX_TRUNC_EXPR,
                                          TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
        unsigned int out, ofs, scale;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 4);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
          return NULL_TREE;

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        for (out = 0; out < nelts; out++)
          {
            unsigned int in1 = (out << scale) + ofs;
            unsigned int in2 = in1 + nelts * 2;
            tree t1, t2;

            t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
            t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            elts[out] = const_binop (MULT_EXPR, t1, t2);
            if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
         cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
          && !(targetm.addr_space.zero_address_valid
               (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts, i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;
        nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
          }

        return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
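
/* For example, REDUC_PLUS_EXPR applied to the constant vector
   { 1, 2, 3, 4 } above folds the elements together with PLUS_EXPR
   and returns the scalar constant 10.  */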

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
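
/* E.g. for sizetype operands 3 and 7 this returns the ssizetype
   constant -4, computed as -(7 - 3) so the unsigned subtraction
   never wraps.  */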

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (real_less (&u, &r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
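
/* So converting the REAL_CST 3.9 to int yields 3 (truncation toward
   zero), a NaN yields 0 with TREE_OVERFLOW set, and 1e30 saturates to
   INT_MAX, again with TREE_OVERFLOW set.  */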

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     Check whether the fractional bits are nonzero, and if so add 1
     to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
2011 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2012 to a fixed-point type. */
2014 static tree
2015 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2017 FIXED_VALUE_TYPE value;
2018 tree t;
2019 bool overflow_p;
2020 double_int di;
2022 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2024 di.low = TREE_INT_CST_ELT (arg1, 0);
2025 if (TREE_INT_CST_NUNITS (arg1) == 1)
2026 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2027 else
2028 di.high = TREE_INT_CST_ELT (arg1, 1);
2030 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2031 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2032 TYPE_SATURATING (type));
2033 t = build_fixed (type, value);
2035 /* Propagate overflow flags. */
2036 if (overflow_p | TREE_OVERFLOW (arg1))
2037 TREE_OVERFLOW (t) = 1;
2038 return t;
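/* For illustration (a worked example, not upstream code): with a 64-bit
   HOST_WIDE_INT, the INTEGER_CST -5 is stored as the single element
   0xfffffffffffffffb, so the code above sign-extends it by setting
   di.high to -1, while the constant 7 gets di.high = 0.  A two-element
   constant supplies di.high explicitly.  */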
2041 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2042 to a fixed-point type. */
2044 static tree
2045 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2047 FIXED_VALUE_TYPE value;
2048 tree t;
2049 bool overflow_p;
2051 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2052 &TREE_REAL_CST (arg1),
2053 TYPE_SATURATING (type));
2054 t = build_fixed (type, value);
2056 /* Propagate overflow flags. */
2057 if (overflow_p | TREE_OVERFLOW (arg1))
2058 TREE_OVERFLOW (t) = 1;
2059 return t;
2062 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2063 type TYPE. If no simplification can be done return NULL_TREE. */
2065 static tree
2066 fold_convert_const (enum tree_code code, tree type, tree arg1)
2068 if (TREE_TYPE (arg1) == type)
2069 return arg1;
2071 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2072 || TREE_CODE (type) == OFFSET_TYPE)
2074 if (TREE_CODE (arg1) == INTEGER_CST)
2075 return fold_convert_const_int_from_int (type, arg1);
2076 else if (TREE_CODE (arg1) == REAL_CST)
2077 return fold_convert_const_int_from_real (code, type, arg1);
2078 else if (TREE_CODE (arg1) == FIXED_CST)
2079 return fold_convert_const_int_from_fixed (type, arg1);
2081 else if (TREE_CODE (type) == REAL_TYPE)
2083 if (TREE_CODE (arg1) == INTEGER_CST)
2084 return build_real_from_int_cst (type, arg1);
2085 else if (TREE_CODE (arg1) == REAL_CST)
2086 return fold_convert_const_real_from_real (type, arg1);
2087 else if (TREE_CODE (arg1) == FIXED_CST)
2088 return fold_convert_const_real_from_fixed (type, arg1);
2090 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2092 if (TREE_CODE (arg1) == FIXED_CST)
2093 return fold_convert_const_fixed_from_fixed (type, arg1);
2094 else if (TREE_CODE (arg1) == INTEGER_CST)
2095 return fold_convert_const_fixed_from_int (type, arg1);
2096 else if (TREE_CODE (arg1) == REAL_CST)
2097 return fold_convert_const_fixed_from_real (type, arg1);
2099 else if (TREE_CODE (type) == VECTOR_TYPE)
2101 if (TREE_CODE (arg1) == VECTOR_CST
2102 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2104 int len = TYPE_VECTOR_SUBPARTS (type);
2105 tree elttype = TREE_TYPE (type);
2106 tree *v = XALLOCAVEC (tree, len);
2107 for (int i = 0; i < len; ++i)
2109 tree elt = VECTOR_CST_ELT (arg1, i);
2110 tree cvt = fold_convert_const (code, elttype, elt);
2111 if (cvt == NULL_TREE)
2112 return NULL_TREE;
2113 v[i] = cvt;
2115 return build_vector (type, v);
2118 return NULL_TREE;
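/* A hypothetical usage sketch (the locals D and I are illustrative, not
   upstream code): callers pass the conversion code so that
   REAL_CST-to-integer folding can honor it, e.g.

     tree d = build_real (double_type_node, dconst2);
     tree i = fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, d);

   folds to the INTEGER_CST 2, while an unsupported combination (say, a
   VECTOR_CST whose element count differs from TYPE's) returns NULL_TREE
   and leaves the caller to build the conversion explicitly.  */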
2121 /* Construct a vector of zero elements of vector type TYPE. */
2123 static tree
2124 build_zero_vector (tree type)
2126 tree t;
2128 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2129 return build_vector_from_val (type, t);
2132 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2134 bool
2135 fold_convertible_p (const_tree type, const_tree arg)
2137 tree orig = TREE_TYPE (arg);
2139 if (type == orig)
2140 return true;
2142 if (TREE_CODE (arg) == ERROR_MARK
2143 || TREE_CODE (type) == ERROR_MARK
2144 || TREE_CODE (orig) == ERROR_MARK)
2145 return false;
2147 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2148 return true;
2150 switch (TREE_CODE (type))
2152 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2153 case POINTER_TYPE: case REFERENCE_TYPE:
2154 case OFFSET_TYPE:
2155 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2156 || TREE_CODE (orig) == OFFSET_TYPE)
2157 return true;
2158 return (TREE_CODE (orig) == VECTOR_TYPE
2159 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2161 case REAL_TYPE:
2162 case FIXED_POINT_TYPE:
2163 case COMPLEX_TYPE:
2164 case VECTOR_TYPE:
2165 case VOID_TYPE:
2166 return TREE_CODE (type) == TREE_CODE (orig);
2168 default:
2169 return false;
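/* For illustration (examples, not upstream code): for ARG of type int,
   fold_convertible_p returns true when TYPE is long or a pointer type
   (first case above), but false when TYPE is double: the REAL_TYPE case
   requires ORIG to be a REAL_TYPE as well, since an int-to-double
   conversion needs a FLOAT_EXPR rather than a NOP_EXPR.  */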
2173 /* Convert expression ARG to type TYPE. Used by the middle-end for
2174 simple conversions in preference to calling the front-end's convert. */
2176 tree
2177 fold_convert_loc (location_t loc, tree type, tree arg)
2179 tree orig = TREE_TYPE (arg);
2180 tree tem;
2182 if (type == orig)
2183 return arg;
2185 if (TREE_CODE (arg) == ERROR_MARK
2186 || TREE_CODE (type) == ERROR_MARK
2187 || TREE_CODE (orig) == ERROR_MARK)
2188 return error_mark_node;
2190 switch (TREE_CODE (type))
2192 case POINTER_TYPE:
2193 case REFERENCE_TYPE:
2194 /* Handle conversions between pointers to different address spaces. */
2195 if (POINTER_TYPE_P (orig)
2196 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2197 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2198 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2199 /* fall through */
2201 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2202 case OFFSET_TYPE:
2203 if (TREE_CODE (arg) == INTEGER_CST)
2205 tem = fold_convert_const (NOP_EXPR, type, arg);
2206 if (tem != NULL_TREE)
2207 return tem;
2209 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2210 || TREE_CODE (orig) == OFFSET_TYPE)
2211 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2212 if (TREE_CODE (orig) == COMPLEX_TYPE)
2213 return fold_convert_loc (loc, type,
2214 fold_build1_loc (loc, REALPART_EXPR,
2215 TREE_TYPE (orig), arg));
2216 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2217 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2218 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2220 case REAL_TYPE:
2221 if (TREE_CODE (arg) == INTEGER_CST)
2223 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2224 if (tem != NULL_TREE)
2225 return tem;
2227 else if (TREE_CODE (arg) == REAL_CST)
2229 tem = fold_convert_const (NOP_EXPR, type, arg);
2230 if (tem != NULL_TREE)
2231 return tem;
2233 else if (TREE_CODE (arg) == FIXED_CST)
2235 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2236 if (tem != NULL_TREE)
2237 return tem;
2240 switch (TREE_CODE (orig))
2242 case INTEGER_TYPE:
2243 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2244 case POINTER_TYPE: case REFERENCE_TYPE:
2245 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2247 case REAL_TYPE:
2248 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2250 case FIXED_POINT_TYPE:
2251 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2253 case COMPLEX_TYPE:
2254 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2255 return fold_convert_loc (loc, type, tem);
2257 default:
2258 gcc_unreachable ();
2261 case FIXED_POINT_TYPE:
2262 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2263 || TREE_CODE (arg) == REAL_CST)
2265 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2266 if (tem != NULL_TREE)
2267 goto fold_convert_exit;
2270 switch (TREE_CODE (orig))
2272 case FIXED_POINT_TYPE:
2273 case INTEGER_TYPE:
2274 case ENUMERAL_TYPE:
2275 case BOOLEAN_TYPE:
2276 case REAL_TYPE:
2277 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2279 case COMPLEX_TYPE:
2280 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2281 return fold_convert_loc (loc, type, tem);
2283 default:
2284 gcc_unreachable ();
2287 case COMPLEX_TYPE:
2288 switch (TREE_CODE (orig))
2290 case INTEGER_TYPE:
2291 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2292 case POINTER_TYPE: case REFERENCE_TYPE:
2293 case REAL_TYPE:
2294 case FIXED_POINT_TYPE:
2295 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2296 fold_convert_loc (loc, TREE_TYPE (type), arg),
2297 fold_convert_loc (loc, TREE_TYPE (type),
2298 integer_zero_node));
2299 case COMPLEX_TYPE:
2301 tree rpart, ipart;
2303 if (TREE_CODE (arg) == COMPLEX_EXPR)
2305 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2306 TREE_OPERAND (arg, 0));
2307 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2308 TREE_OPERAND (arg, 1));
2309 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2312 arg = save_expr (arg);
2313 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2314 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2315 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2316 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2317 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2320 default:
2321 gcc_unreachable ();
2324 case VECTOR_TYPE:
2325 if (integer_zerop (arg))
2326 return build_zero_vector (type);
2327 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2328 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2329 || TREE_CODE (orig) == VECTOR_TYPE);
2330 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2332 case VOID_TYPE:
2333 tem = fold_ignored_result (arg);
2334 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2336 default:
2337 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2338 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2339 gcc_unreachable ();
2341 fold_convert_exit:
2342 protected_set_expr_location_unshare (tem, loc);
2343 return tem;
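/* For illustration (a worked example, not upstream code): converting a
   _Complex double value C to double dispatches to the REAL_TYPE case,
   whose COMPLEX_TYPE sub-case rewrites the conversion as
   (double) __real__ C via REALPART_EXPR; the imaginary part is dropped,
   matching the C rules for complex-to-real conversion.  */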
2346 /* Return false if expr can be assumed not to be an lvalue, true
2347 otherwise. */
2349 static bool
2350 maybe_lvalue_p (const_tree x)
2352 /* We only need to wrap lvalue tree codes. */
2353 switch (TREE_CODE (x))
2355 case VAR_DECL:
2356 case PARM_DECL:
2357 case RESULT_DECL:
2358 case LABEL_DECL:
2359 case FUNCTION_DECL:
2360 case SSA_NAME:
2362 case COMPONENT_REF:
2363 case MEM_REF:
2364 case INDIRECT_REF:
2365 case ARRAY_REF:
2366 case ARRAY_RANGE_REF:
2367 case BIT_FIELD_REF:
2368 case OBJ_TYPE_REF:
2370 case REALPART_EXPR:
2371 case IMAGPART_EXPR:
2372 case PREINCREMENT_EXPR:
2373 case PREDECREMENT_EXPR:
2374 case SAVE_EXPR:
2375 case TRY_CATCH_EXPR:
2376 case WITH_CLEANUP_EXPR:
2377 case COMPOUND_EXPR:
2378 case MODIFY_EXPR:
2379 case TARGET_EXPR:
2380 case COND_EXPR:
2381 case BIND_EXPR:
2382 break;
2384 default:
2385 /* Assume the worst for front-end tree codes. */
2386 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2387 break;
2388 return false;
2391 return true;
2394 /* Return an expr equal to X but certainly not valid as an lvalue. */
2396 tree
2397 non_lvalue_loc (location_t loc, tree x)
2399 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2400 us. */
2401 if (in_gimple_form)
2402 return x;
2404 if (! maybe_lvalue_p (x))
2405 return x;
2406 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2409 /* When pedantic, return an expr equal to X but certainly not valid as a
2410 pedantic lvalue. Otherwise, return X. */
2412 static tree
2413 pedantic_non_lvalue_loc (location_t loc, tree x)
2415 return protected_set_expr_location_unshare (x, loc);
2418 /* Given a tree comparison code, return the code that is the logical inverse.
2419 It is generally not safe to do this for floating-point comparisons, except
2420 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2421 ERROR_MARK in this case. */
2423 enum tree_code
2424 invert_tree_comparison (enum tree_code code, bool honor_nans)
2426 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2427 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2428 return ERROR_MARK;
2430 switch (code)
2432 case EQ_EXPR:
2433 return NE_EXPR;
2434 case NE_EXPR:
2435 return EQ_EXPR;
2436 case GT_EXPR:
2437 return honor_nans ? UNLE_EXPR : LE_EXPR;
2438 case GE_EXPR:
2439 return honor_nans ? UNLT_EXPR : LT_EXPR;
2440 case LT_EXPR:
2441 return honor_nans ? UNGE_EXPR : GE_EXPR;
2442 case LE_EXPR:
2443 return honor_nans ? UNGT_EXPR : GT_EXPR;
2444 case LTGT_EXPR:
2445 return UNEQ_EXPR;
2446 case UNEQ_EXPR:
2447 return LTGT_EXPR;
2448 case UNGT_EXPR:
2449 return LE_EXPR;
2450 case UNGE_EXPR:
2451 return LT_EXPR;
2452 case UNLT_EXPR:
2453 return GE_EXPR;
2454 case UNLE_EXPR:
2455 return GT_EXPR;
2456 case ORDERED_EXPR:
2457 return UNORDERED_EXPR;
2458 case UNORDERED_EXPR:
2459 return ORDERED_EXPR;
2460 default:
2461 gcc_unreachable ();
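/* For illustration (examples, not upstream code): inverting a < b yields
   a >= b when NaNs need not be honored, but a UNGE b when they must be,
   since !(a < b) is also true for unordered operands.  With both
   HONOR_NANS and flag_trapping_math the function returns ERROR_MARK
   instead: a < b traps on NaN operands while UNGE does not, so no single
   comparison code is a trap-preserving inverse.  */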
2465 /* Similar, but return the comparison that results if the operands are
2466 swapped. This is safe for floating-point. */
2468 enum tree_code
2469 swap_tree_comparison (enum tree_code code)
2471 switch (code)
2473 case EQ_EXPR:
2474 case NE_EXPR:
2475 case ORDERED_EXPR:
2476 case UNORDERED_EXPR:
2477 case LTGT_EXPR:
2478 case UNEQ_EXPR:
2479 return code;
2480 case GT_EXPR:
2481 return LT_EXPR;
2482 case GE_EXPR:
2483 return LE_EXPR;
2484 case LT_EXPR:
2485 return GT_EXPR;
2486 case LE_EXPR:
2487 return GE_EXPR;
2488 case UNGT_EXPR:
2489 return UNLT_EXPR;
2490 case UNGE_EXPR:
2491 return UNLE_EXPR;
2492 case UNLT_EXPR:
2493 return UNGT_EXPR;
2494 case UNLE_EXPR:
2495 return UNGE_EXPR;
2496 default:
2497 gcc_unreachable ();
2502 /* Convert a comparison tree code from an enum tree_code representation
2503 into a compcode bit-based encoding. This function is the inverse of
2504 compcode_to_comparison. */
2506 static enum comparison_code
2507 comparison_to_compcode (enum tree_code code)
2509 switch (code)
2511 case LT_EXPR:
2512 return COMPCODE_LT;
2513 case EQ_EXPR:
2514 return COMPCODE_EQ;
2515 case LE_EXPR:
2516 return COMPCODE_LE;
2517 case GT_EXPR:
2518 return COMPCODE_GT;
2519 case NE_EXPR:
2520 return COMPCODE_NE;
2521 case GE_EXPR:
2522 return COMPCODE_GE;
2523 case ORDERED_EXPR:
2524 return COMPCODE_ORD;
2525 case UNORDERED_EXPR:
2526 return COMPCODE_UNORD;
2527 case UNLT_EXPR:
2528 return COMPCODE_UNLT;
2529 case UNEQ_EXPR:
2530 return COMPCODE_UNEQ;
2531 case UNLE_EXPR:
2532 return COMPCODE_UNLE;
2533 case UNGT_EXPR:
2534 return COMPCODE_UNGT;
2535 case LTGT_EXPR:
2536 return COMPCODE_LTGT;
2537 case UNGE_EXPR:
2538 return COMPCODE_UNGE;
2539 default:
2540 gcc_unreachable ();
2544 /* Convert a compcode bit-based encoding of a comparison operator back
2545 to GCC's enum tree_code representation. This function is the
2546 inverse of comparison_to_compcode. */
2548 static enum tree_code
2549 compcode_to_comparison (enum comparison_code code)
2551 switch (code)
2553 case COMPCODE_LT:
2554 return LT_EXPR;
2555 case COMPCODE_EQ:
2556 return EQ_EXPR;
2557 case COMPCODE_LE:
2558 return LE_EXPR;
2559 case COMPCODE_GT:
2560 return GT_EXPR;
2561 case COMPCODE_NE:
2562 return NE_EXPR;
2563 case COMPCODE_GE:
2564 return GE_EXPR;
2565 case COMPCODE_ORD:
2566 return ORDERED_EXPR;
2567 case COMPCODE_UNORD:
2568 return UNORDERED_EXPR;
2569 case COMPCODE_UNLT:
2570 return UNLT_EXPR;
2571 case COMPCODE_UNEQ:
2572 return UNEQ_EXPR;
2573 case COMPCODE_UNLE:
2574 return UNLE_EXPR;
2575 case COMPCODE_UNGT:
2576 return UNGT_EXPR;
2577 case COMPCODE_LTGT:
2578 return LTGT_EXPR;
2579 case COMPCODE_UNGE:
2580 return UNGE_EXPR;
2581 default:
2582 gcc_unreachable ();
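/* A note on the encoding (added for illustration, not upstream code): the
   four primitive outcomes LT (1), EQ (2), GT (4) and UNORD (8) occupy
   separate bits, so composite codes are plain unions, e.g.
   COMPCODE_LE = LT|EQ = 3, COMPCODE_GE = EQ|GT = 6 and
   COMPCODE_NE = LT|GT|UNORD = 13 (NE is true for unordered operands).
   ANDing or ORing two comparisons then reduces to bitwise AND or OR of
   these masks, which is exactly what combine_comparisons below relies
   on.  */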
2586 /* Return a tree for the comparison which is the combination of
2587 doing the AND or OR (depending on CODE) of the two operations LCODE
2588 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2589 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2590 if this makes the transformation invalid. */
2592 tree
2593 combine_comparisons (location_t loc,
2594 enum tree_code code, enum tree_code lcode,
2595 enum tree_code rcode, tree truth_type,
2596 tree ll_arg, tree lr_arg)
2598 bool honor_nans = HONOR_NANS (ll_arg);
2599 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2600 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2601 int compcode;
2603 switch (code)
2605 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2606 compcode = lcompcode & rcompcode;
2607 break;
2609 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2610 compcode = lcompcode | rcompcode;
2611 break;
2613 default:
2614 return NULL_TREE;
2617 if (!honor_nans)
2619 /* Eliminate unordered comparisons, as well as LTGT and ORD
2620 which are not used unless the mode has NaNs. */
2621 compcode &= ~COMPCODE_UNORD;
2622 if (compcode == COMPCODE_LTGT)
2623 compcode = COMPCODE_NE;
2624 else if (compcode == COMPCODE_ORD)
2625 compcode = COMPCODE_TRUE;
2627 else if (flag_trapping_math)
2629 /* Check that the original operation and the optimized ones will trap
2630 under the same condition. */
2631 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2632 && (lcompcode != COMPCODE_EQ)
2633 && (lcompcode != COMPCODE_ORD);
2634 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2635 && (rcompcode != COMPCODE_EQ)
2636 && (rcompcode != COMPCODE_ORD);
2637 bool trap = (compcode & COMPCODE_UNORD) == 0
2638 && (compcode != COMPCODE_EQ)
2639 && (compcode != COMPCODE_ORD);
2641 /* In a short-circuited boolean expression the LHS might be
2642 such that the RHS, if evaluated, will never trap. For
2643 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2644 if neither x nor y is NaN. (This is a mixed blessing: for
2645 example, the expression above will never trap, hence
2646 optimizing it to x < y would be invalid). */
2647 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2648 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2649 rtrap = false;
2651 /* If the comparison was short-circuited, and only the RHS
2652 trapped, we may now generate a spurious trap. */
2653 if (rtrap && !ltrap
2654 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2655 return NULL_TREE;
2657 /* If we changed the conditions that cause a trap, we lose. */
2658 if ((ltrap || rtrap) != trap)
2659 return NULL_TREE;
2662 if (compcode == COMPCODE_TRUE)
2663 return constant_boolean_node (true, truth_type);
2664 else if (compcode == COMPCODE_FALSE)
2665 return constant_boolean_node (false, truth_type);
2666 else
2668 enum tree_code tcode;
2670 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2671 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
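/* A worked example (added for illustration, not upstream code): combining
   (x < y) || (x == y) under TRUTH_ORIF_EXPR ORs COMPCODE_LT (1) with
   COMPCODE_EQ (2), giving COMPCODE_LE (3), so the result folds to
   x <= y.  With flag_trapping_math this remains valid: both the original
   x < y and the combined x <= y trap on unordered operands, so the trap
   condition is unchanged and the checks above pass.  */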
2675 /* Return nonzero if two operands (typically of the same tree node)
2676 are necessarily equal. FLAGS modifies behavior as follows:
2678 If OEP_ONLY_CONST is set, only return nonzero for constants.
2679 This function tests whether the operands are indistinguishable;
2680 it does not test whether they are equal using C's == operation.
2681 The distinction is important for IEEE floating point, because
2682 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2683 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2685 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2686 even though it may hold multiple values during a function.
2687 This is because a GCC tree node guarantees that nothing else is
2688 executed between the evaluation of its "operands" (which may often
2689 be evaluated in arbitrary order). Hence if the operands themselves
2690 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2691 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2692 unset means assuming isochronic (or instantaneous) tree equivalence.
2693 Unless comparing arbitrary expression trees, such as from different
2694 statements, this flag can usually be left unset.
2696 If OEP_PURE_SAME is set, then pure functions with identical arguments
2697 are considered the same. It is used when the caller has other ways
2698 to ensure that global memory is unchanged in between.
2700 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2701 not values of expressions.
2703 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2704 any operand with side effects. This is unnecessarily conservative in the
2705 case where we know that arg0 and arg1 are in disjoint code paths (such as
2706 the two arms of the ?: operator). In addition, OEP_MATCH_SIDE_EFFECTS is used when comparing
2707 addresses with TREE_CONSTANT flag set so we know that &var == &var
2708 even if var is volatile. */
2710 int
2711 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2713 /* If either is ERROR_MARK, they aren't equal. */
2714 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2715 || TREE_TYPE (arg0) == error_mark_node
2716 || TREE_TYPE (arg1) == error_mark_node)
2717 return 0;
2719 /* Similarly, if either does not have a type (like a released SSA name),
2720 they aren't equal. */
2721 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2722 return 0;
2724 /* We cannot consider pointers to different address space equal. */
2725 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2726 && POINTER_TYPE_P (TREE_TYPE (arg1))
2727 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2728 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2729 return 0;
2731 /* Check equality of integer constants before bailing out due to
2732 precision differences. */
2733 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2735 /* The address of an INTEGER_CST is not defined; check that we did not
2736 forget to drop the OEP_ADDRESS_OF flag. */
2737 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2738 return tree_int_cst_equal (arg0, arg1);
2741 if (!(flags & OEP_ADDRESS_OF))
2743 /* If the two types don't have the same signedness, then we can't consider
2744 them equal. We must check this before the STRIP_NOPS calls
2745 because they may change the signedness of the arguments. As pointers
2746 strictly don't have a signedness, require either two pointers or
2747 two non-pointers as well. */
2748 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2749 || POINTER_TYPE_P (TREE_TYPE (arg0))
2750 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2751 return 0;
2753 /* If the two types don't have the same precision, then it is not safe
2754 to strip NOPs. */
2755 if (element_precision (TREE_TYPE (arg0))
2756 != element_precision (TREE_TYPE (arg1)))
2757 return 0;
2759 STRIP_NOPS (arg0);
2760 STRIP_NOPS (arg1);
2762 #if 0
2763 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable
2764 the sanity check once the issue is solved. */
2765 else
2766 /* Addresses of conversions and SSA_NAMEs (and many other things)
2767 are not defined. Check that we did not forget to drop the
2768 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2769 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2770 && TREE_CODE (arg0) != SSA_NAME);
2771 #endif
2773 /* In case both args are comparisons but with different comparison
2774 code, try to swap the comparison operands of one arg to produce
2775 a match and compare that variant. */
2776 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2777 && COMPARISON_CLASS_P (arg0)
2778 && COMPARISON_CLASS_P (arg1))
2780 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2782 if (TREE_CODE (arg0) == swap_code)
2783 return operand_equal_p (TREE_OPERAND (arg0, 0),
2784 TREE_OPERAND (arg1, 1), flags)
2785 && operand_equal_p (TREE_OPERAND (arg0, 1),
2786 TREE_OPERAND (arg1, 0), flags);
2789 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2791 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2792 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2794 else if (flags & OEP_ADDRESS_OF)
2796 /* If we are interested in comparing addresses ignore
2797 MEM_REF wrappings of the base that can appear just for
2798 TBAA reasons. */
2799 if (TREE_CODE (arg0) == MEM_REF
2800 && DECL_P (arg1)
2801 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2802 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2803 && integer_zerop (TREE_OPERAND (arg0, 1)))
2804 return 1;
2805 else if (TREE_CODE (arg1) == MEM_REF
2806 && DECL_P (arg0)
2807 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2808 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2809 && integer_zerop (TREE_OPERAND (arg1, 1)))
2810 return 1;
2811 return 0;
2813 else
2814 return 0;
2817 /* When not checking addresses, this is needed for conversions and for
2818 COMPONENT_REF. Might as well play it safe and always test this. */
2819 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2820 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2821 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2822 && !(flags & OEP_ADDRESS_OF)))
2823 return 0;
2825 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2826 We don't care about side effects in that case because the SAVE_EXPR
2827 takes care of that for us. In all other cases, two expressions are
2828 equal if they have no side effects. If we have two identical
2829 expressions with side effects that should be treated the same due
2830 to the only side effects being identical SAVE_EXPR's, that will
2831 be detected in the recursive calls below.
2832 If we are taking an invariant address of two identical objects
2833 they are necessarily equal as well. */
2834 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2835 && (TREE_CODE (arg0) == SAVE_EXPR
2836 || (flags & OEP_MATCH_SIDE_EFFECTS)
2837 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2838 return 1;
2840 /* Next handle constant cases, those for which we can return 1 even
2841 if ONLY_CONST is set. */
2842 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2843 switch (TREE_CODE (arg0))
2845 case INTEGER_CST:
2846 return tree_int_cst_equal (arg0, arg1);
2848 case FIXED_CST:
2849 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2850 TREE_FIXED_CST (arg1));
2852 case REAL_CST:
2853 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2854 return 1;
2857 if (!HONOR_SIGNED_ZEROS (arg0))
2859 /* If we do not distinguish between signed and unsigned zero,
2860 consider them equal. */
2861 if (real_zerop (arg0) && real_zerop (arg1))
2862 return 1;
2864 return 0;
2866 case VECTOR_CST:
2868 unsigned i;
2870 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2871 return 0;
2873 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2875 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2876 VECTOR_CST_ELT (arg1, i), flags))
2877 return 0;
2879 return 1;
2882 case COMPLEX_CST:
2883 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2884 flags)
2885 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2886 flags));
2888 case STRING_CST:
2889 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2890 && ! memcmp (TREE_STRING_POINTER (arg0),
2891 TREE_STRING_POINTER (arg1),
2892 TREE_STRING_LENGTH (arg0)));
2894 case ADDR_EXPR:
2895 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2896 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2897 flags | OEP_ADDRESS_OF
2898 | OEP_MATCH_SIDE_EFFECTS);
2899 case CONSTRUCTOR:
2900 /* In GIMPLE empty constructors are allowed in initializers of
2901 aggregates. */
2902 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2903 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2904 default:
2905 break;
2908 if (flags & OEP_ONLY_CONST)
2909 return 0;
2911 /* Define macros to test an operand from arg0 and arg1 for equality and a
2912 variant that allows null and views null as being different from any
2913 non-null value. In the latter case, if either is null, then both
2914 must be; otherwise, do the normal comparison. */
2915 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2916 TREE_OPERAND (arg1, N), flags)
2918 #define OP_SAME_WITH_NULL(N) \
2919 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2920 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2922 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2924 case tcc_unary:
2925 /* Two conversions are equal only if signedness and modes match. */
2926 switch (TREE_CODE (arg0))
2928 CASE_CONVERT:
2929 case FIX_TRUNC_EXPR:
2930 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2931 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2932 return 0;
2933 break;
2934 default:
2935 break;
2938 return OP_SAME (0);
2941 case tcc_comparison:
2942 case tcc_binary:
2943 if (OP_SAME (0) && OP_SAME (1))
2944 return 1;
2946 /* For commutative ops, allow the other order. */
2947 return (commutative_tree_code (TREE_CODE (arg0))
2948 && operand_equal_p (TREE_OPERAND (arg0, 0),
2949 TREE_OPERAND (arg1, 1), flags)
2950 && operand_equal_p (TREE_OPERAND (arg0, 1),
2951 TREE_OPERAND (arg1, 0), flags));
2953 case tcc_reference:
2954 /* If either of the pointer (or reference) expressions we are
2955 dereferencing contains a side effect, these cannot be equal,
2956 but their addresses can be. */
2957 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
2958 && (TREE_SIDE_EFFECTS (arg0)
2959 || TREE_SIDE_EFFECTS (arg1)))
2960 return 0;
2962 switch (TREE_CODE (arg0))
2964 case INDIRECT_REF:
2965 if (!(flags & OEP_ADDRESS_OF)
2966 && (TYPE_ALIGN (TREE_TYPE (arg0))
2967 != TYPE_ALIGN (TREE_TYPE (arg1))))
2968 return 0;
2969 flags &= ~OEP_ADDRESS_OF;
2970 return OP_SAME (0);
2972 case REALPART_EXPR:
2973 case IMAGPART_EXPR:
2974 case VIEW_CONVERT_EXPR:
2975 return OP_SAME (0);
2977 case TARGET_MEM_REF:
2978 case MEM_REF:
2979 if (!(flags & OEP_ADDRESS_OF))
2981 /* Require equal access sizes */
2982 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
2983 && (!TYPE_SIZE (TREE_TYPE (arg0))
2984 || !TYPE_SIZE (TREE_TYPE (arg1))
2985 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2986 TYPE_SIZE (TREE_TYPE (arg1)),
2987 flags)))
2988 return 0;
2989 /* Verify that accesses are TBAA compatible. */
2990 if (!alias_ptr_types_compatible_p
2991 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2992 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2993 || (MR_DEPENDENCE_CLIQUE (arg0)
2994 != MR_DEPENDENCE_CLIQUE (arg1))
2995 || (MR_DEPENDENCE_BASE (arg0)
2996 != MR_DEPENDENCE_BASE (arg1)))
2997 return 0;
2998 /* Verify that alignment is compatible. */
2999 if (TYPE_ALIGN (TREE_TYPE (arg0))
3000 != TYPE_ALIGN (TREE_TYPE (arg1)))
3001 return 0;
3003 flags &= ~OEP_ADDRESS_OF;
3004 return (OP_SAME (0) && OP_SAME (1)
3005 /* TARGET_MEM_REFs require equal extra operands. */
3006 && (TREE_CODE (arg0) != TARGET_MEM_REF
3007 || (OP_SAME_WITH_NULL (2)
3008 && OP_SAME_WITH_NULL (3)
3009 && OP_SAME_WITH_NULL (4))));
3011 case ARRAY_REF:
3012 case ARRAY_RANGE_REF:
3013 /* Operands 2 and 3 may be null.
3014 Compare the array index by value if it is constant first as we
3015 may have different types but same value here. */
3016 if (!OP_SAME (0))
3017 return 0;
3018 flags &= ~OEP_ADDRESS_OF;
3019 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3020 TREE_OPERAND (arg1, 1))
3021 || OP_SAME (1))
3022 && OP_SAME_WITH_NULL (2)
3023 && OP_SAME_WITH_NULL (3));
3025 case COMPONENT_REF:
3026 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3027 may be NULL when we're called to compare MEM_EXPRs. */
3028 if (!OP_SAME_WITH_NULL (0)
3029 || !OP_SAME (1))
3030 return 0;
3031 flags &= ~OEP_ADDRESS_OF;
3032 return OP_SAME_WITH_NULL (2);
3034 case BIT_FIELD_REF:
3035 if (!OP_SAME (0))
3036 return 0;
3037 flags &= ~OEP_ADDRESS_OF;
3038 return OP_SAME (1) && OP_SAME (2);
3040 default:
3041 return 0;
3044 case tcc_expression:
3045 switch (TREE_CODE (arg0))
3047 case ADDR_EXPR:
3048 /* Be sure we pass right ADDRESS_OF flag. */
3049 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3050 return operand_equal_p (TREE_OPERAND (arg0, 0),
3051 TREE_OPERAND (arg1, 0),
3052 flags | OEP_ADDRESS_OF);
3054 case TRUTH_NOT_EXPR:
3055 return OP_SAME (0);
3057 case TRUTH_ANDIF_EXPR:
3058 case TRUTH_ORIF_EXPR:
3059 return OP_SAME (0) && OP_SAME (1);
3061 case FMA_EXPR:
3062 case WIDEN_MULT_PLUS_EXPR:
3063 case WIDEN_MULT_MINUS_EXPR:
3064 if (!OP_SAME (2))
3065 return 0;
3066 /* The multiplication operands are commutative. */
3067 /* FALLTHRU */
3069 case TRUTH_AND_EXPR:
3070 case TRUTH_OR_EXPR:
3071 case TRUTH_XOR_EXPR:
3072 if (OP_SAME (0) && OP_SAME (1))
3073 return 1;
3075 /* Otherwise take into account this is a commutative operation. */
3076 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3077 TREE_OPERAND (arg1, 1), flags)
3078 && operand_equal_p (TREE_OPERAND (arg0, 1),
3079 TREE_OPERAND (arg1, 0), flags));
3081 case COND_EXPR:
3082 case VEC_COND_EXPR:
3083 case DOT_PROD_EXPR:
3084 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3086 default:
3087 return 0;
3090 case tcc_vl_exp:
3091 switch (TREE_CODE (arg0))
3093 case CALL_EXPR:
3094 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3095 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3096 /* If one CALL_EXPR is an internal function call and the other is
3097 a normal function call, then they are not equal. */
3098 return 0;
3099 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3101 /* If the CALL_EXPRs call different internal functions, then they
3102 are not equal. */
3103 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3104 return 0;
3106 else
3108 /* If the CALL_EXPRs call different functions, then they are not
3109 equal. */
3110 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3111 flags))
3112 return 0;
3115 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3117 unsigned int cef = call_expr_flags (arg0);
3118 if (flags & OEP_PURE_SAME)
3119 cef &= ECF_CONST | ECF_PURE;
3120 else
3121 cef &= ECF_CONST;
3122 if (!cef)
3123 return 0;
3126 /* Now see if all the arguments are the same. */
3128 const_call_expr_arg_iterator iter0, iter1;
3129 const_tree a0, a1;
3130 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3131 a1 = first_const_call_expr_arg (arg1, &iter1);
3132 a0 && a1;
3133 a0 = next_const_call_expr_arg (&iter0),
3134 a1 = next_const_call_expr_arg (&iter1))
3135 if (! operand_equal_p (a0, a1, flags))
3136 return 0;
3138 /* If we get here and both argument lists are exhausted
3139 then the CALL_EXPRs are equal. */
3140 return ! (a0 || a1);
3142 default:
3143 return 0;
3146 case tcc_declaration:
3147 /* Consider __builtin_sqrt equal to sqrt. */
3148 return (TREE_CODE (arg0) == FUNCTION_DECL
3149 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3150 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3151 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3153 case tcc_exceptional:
3154 if (TREE_CODE (arg0) == CONSTRUCTOR)
3156 /* In GIMPLE constructors are used only to build vectors from
3157 elements. Individual elements in the constructor must be
3158 indexed in increasing order and form an initial sequence.
3160 We make no effort to compare constructors in GENERIC
3161 (see sem_variable::equals in ipa-icf, which can do so for
3162 constants). */
3163 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3164 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3165 return 0;
3167 /* Be sure that vectors constructed have the same representation.
3168 So far we have only tested that element precisions and modes match.
3169 Vectors may be BLKmode, so also check that the numbers of
3170 parts match. */
3171 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3172 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3173 return 0;
3175 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3176 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3177 unsigned int len = vec_safe_length (v0);
3179 if (len != vec_safe_length (v1))
3180 return 0;
3182 for (unsigned int i = 0; i < len; i++)
3184 constructor_elt *c0 = &(*v0)[i];
3185 constructor_elt *c1 = &(*v1)[i];
3187 if (!operand_equal_p (c0->value, c1->value, flags)
3188 /* In GIMPLE the indexes can be either NULL or matching i.
3189 Double check this so we won't get false
3190 positives for GENERIC. */
3191 || (c0->index
3192 && (TREE_CODE (c0->index) != INTEGER_CST
3193 || !compare_tree_int (c0->index, i)))
3194 || (c1->index
3195 && (TREE_CODE (c1->index) != INTEGER_CST
3196 || !compare_tree_int (c1->index, i))))
3197 return 0;
3199 return 1;
3201 return 0;
3203 default:
3204 return 0;
3207 #undef OP_SAME
3208 #undef OP_SAME_WITH_NULL
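/* For illustration (examples, not upstream code): operand_equal_p
   considers a + b and b + a equal through the commutative tcc_binary case
   above.  The REAL_CST case uses real_identical, so -0.0 and 0.0 compare
   unequal unless HONOR_SIGNED_ZEROS is false for the type, and two
   identical calls f () compare equal only if f is const (or pure, when
   OEP_PURE_SAME is set).  */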
3211 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3212 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3214 When in doubt, return 0. */
3216 static int
3217 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3219 int unsignedp1, unsignedpo;
3220 tree primarg0, primarg1, primother;
3221 unsigned int correct_width;
3223 if (operand_equal_p (arg0, arg1, 0))
3224 return 1;
3226 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3227 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3228 return 0;
3230 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3231 and see if the inner values are the same. This removes any
3232 signedness comparison, which doesn't matter here. */
3233 primarg0 = arg0, primarg1 = arg1;
3234 STRIP_NOPS (primarg0);
3235 STRIP_NOPS (primarg1);
3236 if (operand_equal_p (primarg0, primarg1, 0))
3237 return 1;
3239 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3240 actual comparison operand, ARG0.
3242 First throw away any conversions to wider types
3243 already present in the operands. */
3245 primarg1 = get_narrower (arg1, &unsignedp1);
3246 primother = get_narrower (other, &unsignedpo);
3248 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3249 if (unsignedp1 == unsignedpo
3250 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3251 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3253 tree type = TREE_TYPE (arg0);
3255 /* Make sure shorter operand is extended the right way
3256 to match the longer operand. */
3257 primarg1 = fold_convert (signed_or_unsigned_type_for
3258 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3260 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3261 return 1;
3264 return 0;
3267 /* See if ARG is an expression that is either a comparison or is performing
3268 arithmetic on comparisons. The comparisons must only be comparing
3269 two different values, which will be stored in *CVAL1 and *CVAL2; if
3270 they are nonzero it means that some operands have already been found.
3271 No variables may be used anywhere else in the expression except in the
3272 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3273 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3275 If this is true, return 1. Otherwise, return zero. */
3277 static int
3278 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3280 enum tree_code code = TREE_CODE (arg);
3281 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3283 /* We can handle some of the tcc_expression cases here. */
3284 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3285 tclass = tcc_unary;
3286 else if (tclass == tcc_expression
3287 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3288 || code == COMPOUND_EXPR))
3289 tclass = tcc_binary;
3291 else if (tclass == tcc_expression && code == SAVE_EXPR
3292 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3294 /* If we've already found a CVAL1 or CVAL2, this expression is
3295 too complex to handle. */
3296 if (*cval1 || *cval2)
3297 return 0;
3299 tclass = tcc_unary;
3300 *save_p = 1;
3303 switch (tclass)
3305 case tcc_unary:
3306 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3308 case tcc_binary:
3309 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3310 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3311 cval1, cval2, save_p));
3313 case tcc_constant:
3314 return 1;
3316 case tcc_expression:
3317 if (code == COND_EXPR)
3318 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3319 cval1, cval2, save_p)
3320 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3321 cval1, cval2, save_p)
3322 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3323 cval1, cval2, save_p));
3324 return 0;
3326 case tcc_comparison:
3327 /* First see if we can handle the first operand, then the second. For
3328 the second operand, we know *CVAL1 can't be zero. It must be that
3329 one side of the comparison is each of the values; test for the
3330 case where this isn't true by failing if the two operands
3331 are the same. */
3333 if (operand_equal_p (TREE_OPERAND (arg, 0),
3334 TREE_OPERAND (arg, 1), 0))
3335 return 0;
3337 if (*cval1 == 0)
3338 *cval1 = TREE_OPERAND (arg, 0);
3339 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3341 else if (*cval2 == 0)
3342 *cval2 = TREE_OPERAND (arg, 0);
3343 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3345 else
3346 return 0;
3348 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3350 else if (*cval2 == 0)
3351 *cval2 = TREE_OPERAND (arg, 1);
3352 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3354 else
3355 return 0;
3357 return 1;
3359 default:
3360 return 0;
3364 /* ARG is a tree that is known to contain just arithmetic operations and
3365 comparisons. Evaluate the operations in the tree substituting NEW0 for
3366 any occurrence of OLD0 as an operand of a comparison and likewise for
3367 NEW1 and OLD1. */
3369 static tree
3370 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3371 tree old1, tree new1)
3373 tree type = TREE_TYPE (arg);
3374 enum tree_code code = TREE_CODE (arg);
3375 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3377 /* We can handle some of the tcc_expression cases here. */
3378 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3379 tclass = tcc_unary;
3380 else if (tclass == tcc_expression
3381 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3382 tclass = tcc_binary;
3384 switch (tclass)
3386 case tcc_unary:
3387 return fold_build1_loc (loc, code, type,
3388 eval_subst (loc, TREE_OPERAND (arg, 0),
3389 old0, new0, old1, new1));
3391 case tcc_binary:
3392 return fold_build2_loc (loc, code, type,
3393 eval_subst (loc, TREE_OPERAND (arg, 0),
3394 old0, new0, old1, new1),
3395 eval_subst (loc, TREE_OPERAND (arg, 1),
3396 old0, new0, old1, new1));
3398 case tcc_expression:
3399 switch (code)
3401 case SAVE_EXPR:
3402 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3403 old1, new1);
3405 case COMPOUND_EXPR:
3406 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3407 old1, new1);
3409 case COND_EXPR:
3410 return fold_build3_loc (loc, code, type,
3411 eval_subst (loc, TREE_OPERAND (arg, 0),
3412 old0, new0, old1, new1),
3413 eval_subst (loc, TREE_OPERAND (arg, 1),
3414 old0, new0, old1, new1),
3415 eval_subst (loc, TREE_OPERAND (arg, 2),
3416 old0, new0, old1, new1));
3417 default:
3418 break;
3420 /* Fall through - ??? */
3422 case tcc_comparison:
3424 tree arg0 = TREE_OPERAND (arg, 0);
3425 tree arg1 = TREE_OPERAND (arg, 1);
3427 /* We need to check both for exact equality and tree equality. The
3428 former will be true if the operand has a side-effect. In that
3429 case, we know the operand occurred exactly once. */
3431 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3432 arg0 = new0;
3433 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3434 arg0 = new1;
3436 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3437 arg1 = new0;
3438 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3439 arg1 = new1;
3441 return fold_build2_loc (loc, code, type, arg0, arg1);
3444 default:
3445 return arg;
3449 /* Return a tree for the case when the result of an expression is RESULT
3450 converted to TYPE and OMITTED was previously an operand of the expression
3451 but is now not needed (e.g., we folded OMITTED * 0).
3453 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3454 the conversion of RESULT to TYPE. */
3456 tree
3457 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3459 tree t = fold_convert_loc (loc, type, result);
3461 /* If the resulting operand is an empty statement, just return the omitted
3462 statement cast to void. */
3463 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3464 return build1_loc (loc, NOP_EXPR, void_type_node,
3465 fold_ignored_result (omitted));
3467 if (TREE_SIDE_EFFECTS (omitted))
3468 return build2_loc (loc, COMPOUND_EXPR, type,
3469 fold_ignored_result (omitted), t);
3471 return non_lvalue_loc (loc, t);
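/* For illustration (a sketch, not upstream code): when folding f () * 0
   to 0, a caller can use
   omit_one_operand_loc (loc, type, integer_zero_node, call); because the
   call has side effects, the result is the COMPOUND_EXPR (f (), 0),
   which still evaluates f () yet yields the constant.  Without side
   effects the omitted operand disappears entirely.  */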
3474 /* Return a tree for the case when the result of an expression is RESULT
3475 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3476 of the expression but are now not needed.
3478 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3479 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3480 evaluated before OMITTED2. Otherwise, if neither has side effects,
3481 just do the conversion of RESULT to TYPE. */
3483 tree
3484 omit_two_operands_loc (location_t loc, tree type, tree result,
3485 tree omitted1, tree omitted2)
3487 tree t = fold_convert_loc (loc, type, result);
3489 if (TREE_SIDE_EFFECTS (omitted2))
3490 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3491 if (TREE_SIDE_EFFECTS (omitted1))
3492 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3494 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3498 /* Return a simplified tree node for the truth-negation of ARG. This
3499 never alters ARG itself. We assume that ARG is an operation that
3500 returns a truth value (0 or 1).
3502 FIXME: one would think we would fold the result, but it causes
3503 problems with the dominator optimizer. */
3505 static tree
3506 fold_truth_not_expr (location_t loc, tree arg)
3508 tree type = TREE_TYPE (arg);
3509 enum tree_code code = TREE_CODE (arg);
3510 location_t loc1, loc2;
3512 /* If this is a comparison, we can simply invert it, except for
3513 floating-point non-equality comparisons, in which case we just
3514 enclose a TRUTH_NOT_EXPR around what we have. */
3516 if (TREE_CODE_CLASS (code) == tcc_comparison)
3518 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3519 if (FLOAT_TYPE_P (op_type)
3520 && flag_trapping_math
3521 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3522 && code != NE_EXPR && code != EQ_EXPR)
3523 return NULL_TREE;
3525 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3526 if (code == ERROR_MARK)
3527 return NULL_TREE;
3529 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3530 TREE_OPERAND (arg, 1));
3533 switch (code)
3535 case INTEGER_CST:
3536 return constant_boolean_node (integer_zerop (arg), type);
3538 case TRUTH_AND_EXPR:
3539 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3540 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3541 return build2_loc (loc, TRUTH_OR_EXPR, type,
3542 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3543 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3545 case TRUTH_OR_EXPR:
3546 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3547 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3548 return build2_loc (loc, TRUTH_AND_EXPR, type,
3549 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3550 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3552 case TRUTH_XOR_EXPR:
3553 /* Here we can invert either operand. We invert the first operand
3554 unless the second operand is a TRUTH_NOT_EXPR in which case our
3555 result is the XOR of the first operand with the inside of the
3556 negation of the second operand. */
3558 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3559 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3560 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3561 else
3562 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3563 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3564 TREE_OPERAND (arg, 1));
3566 case TRUTH_ANDIF_EXPR:
3567 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3568 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3569 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3570 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3571 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3573 case TRUTH_ORIF_EXPR:
3574 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3575 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3576 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3577 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3578 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3580 case TRUTH_NOT_EXPR:
3581 return TREE_OPERAND (arg, 0);
3583 case COND_EXPR:
3585 tree arg1 = TREE_OPERAND (arg, 1);
3586 tree arg2 = TREE_OPERAND (arg, 2);
3588 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3589 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3591 /* A COND_EXPR may have a throw as one operand, which
3592 then has void type. Just leave void operands
3593 as they are. */
3594 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3595 VOID_TYPE_P (TREE_TYPE (arg1))
3596 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3597 VOID_TYPE_P (TREE_TYPE (arg2))
3598 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3601 case COMPOUND_EXPR:
3602 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3603 return build2_loc (loc, COMPOUND_EXPR, type,
3604 TREE_OPERAND (arg, 0),
3605 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3607 case NON_LVALUE_EXPR:
3608 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3609 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3611 CASE_CONVERT:
3612 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3613 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3615 /* ... fall through ... */
3617 case FLOAT_EXPR:
3618 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3619 return build1_loc (loc, TREE_CODE (arg), type,
3620 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3622 case BIT_AND_EXPR:
3623 if (!integer_onep (TREE_OPERAND (arg, 1)))
3624 return NULL_TREE;
3625 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3627 case SAVE_EXPR:
3628 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3630 case CLEANUP_POINT_EXPR:
3631 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3632 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3633 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3635 default:
3636 return NULL_TREE;
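/* For illustration (examples, not upstream code): negating a && b through
   the TRUTH_AND_EXPR case gives !a || !b by De Morgan, inverting each
   operand recursively.  Negating a floating-point a < b under
   flag_trapping_math instead returns NULL_TREE, because
   invert_tree_comparison cannot supply a trap-preserving inverse
   there.  */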
3640 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3641 assume that ARG is an operation that returns a truth value (0 or 1
3642 for scalars, 0 or -1 for vectors). Return the folded expression if
3643 folding is successful. Otherwise, return NULL_TREE. */
3645 static tree
3646 fold_invert_truthvalue (location_t loc, tree arg)
3648 tree type = TREE_TYPE (arg);
3649 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3650 ? BIT_NOT_EXPR
3651 : TRUTH_NOT_EXPR,
3652 type, arg);
3655 /* Return a simplified tree node for the truth-negation of ARG. This
3656 never alters ARG itself. We assume that ARG is an operation that
3657 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3659 tree
3660 invert_truthvalue_loc (location_t loc, tree arg)
3662 if (TREE_CODE (arg) == ERROR_MARK)
3663 return arg;
3665 tree type = TREE_TYPE (arg);
3666 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3667 ? BIT_NOT_EXPR
3668 : TRUTH_NOT_EXPR,
3669 type, arg);
3672 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3673 with code CODE. This optimization is unsafe. */
3674 static tree
3675 distribute_real_division (location_t loc, enum tree_code code, tree type,
3676 tree arg0, tree arg1)
3678 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3679 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3681 /* (A / C) +- (B / C) -> (A +- B) / C. */
3682 if (mul0 == mul1
3683 && operand_equal_p (TREE_OPERAND (arg0, 1),
3684 TREE_OPERAND (arg1, 1), 0))
3685 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3686 fold_build2_loc (loc, code, type,
3687 TREE_OPERAND (arg0, 0),
3688 TREE_OPERAND (arg1, 0)),
3689 TREE_OPERAND (arg0, 1));
3691 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3692 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3693 TREE_OPERAND (arg1, 0), 0)
3694 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3695 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3697 REAL_VALUE_TYPE r0, r1;
3698 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3699 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3700 if (!mul0)
3701 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3702 if (!mul1)
3703 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3704 real_arithmetic (&r0, code, &r0, &r1);
3705 return fold_build2_loc (loc, MULT_EXPR, type,
3706 TREE_OPERAND (arg0, 0),
3707 build_real (type, r0));
3710 return NULL_TREE;
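/* A worked example (added for illustration, not upstream code): for x of
   type double, x / 2.0 + x / 4.0 matches the second pattern (mul0 and
   mul1 both false), so r0 becomes 1/2.0 and r1 becomes 1/4.0, and their
   sum folds the whole expression to x * 0.75.  The transformation is
   unsafe in general because 1/C need not be exactly representable, which
   is why callers only apply it under unsafe-math optimization flags.  */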
3713 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3714 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3715 and uses reverse storage order if REVERSEP is nonzero. */
3717 static tree
3718 make_bit_field_ref (location_t loc, tree inner, tree type,
3719 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3720 int unsignedp, int reversep)
3722 tree result, bftype;
3724 if (bitpos == 0 && !reversep)
3726 tree size = TYPE_SIZE (TREE_TYPE (inner));
3727 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3728 || POINTER_TYPE_P (TREE_TYPE (inner)))
3729 && tree_fits_shwi_p (size)
3730 && tree_to_shwi (size) == bitsize)
3731 return fold_convert_loc (loc, type, inner);
3734 bftype = type;
3735 if (TYPE_PRECISION (bftype) != bitsize
3736 || TYPE_UNSIGNED (bftype) == !unsignedp)
3737 bftype = build_nonstandard_integer_type (bitsize, 0);
3739 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3740 size_int (bitsize), bitsize_int (bitpos));
3741 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3743 if (bftype != type)
3744 result = fold_convert_loc (loc, type, result);
3746 return result;
3749 /* Optimize a bit-field compare.
3751 There are two cases: First is a compare against a constant and the
3752 second is a comparison of two items where the fields are at the same
3753 bit position relative to the start of a chunk (byte, halfword, word)
3754 large enough to contain it. In these cases we can avoid the shift
3755 implicit in bitfield extractions.
3757 For constants, we emit a compare of the shifted constant with the
3758 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3759 compared. For two fields at the same position, we do the ANDs with the
3760 similar mask and compare the result of the ANDs.
3762 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3763 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3764 are the left and right operands of the comparison, respectively.
3766 If the optimization described above can be done, we return the resulting
3767 tree. Otherwise we return zero. */
3769 static tree
3770 optimize_bit_field_compare (location_t loc, enum tree_code code,
3771 tree compare_type, tree lhs, tree rhs)
3773 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3774 tree type = TREE_TYPE (lhs);
3775 tree unsigned_type;
3776 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3777 machine_mode lmode, rmode, nmode;
3778 int lunsignedp, runsignedp;
3779 int lreversep, rreversep;
3780 int lvolatilep = 0, rvolatilep = 0;
3781 tree linner, rinner = NULL_TREE;
3782 tree mask;
3783 tree offset;
3785 /* Get all the information about the extractions being done. If the bit size
3786 is the same as the size of the underlying object, we aren't doing an
3787 extraction at all and so can do nothing. We also don't want to
3788 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3789 then will no longer be able to replace it. */
3790 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3791 &lunsignedp, &lreversep, &lvolatilep, false);
3792 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3793 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3794 return 0;
3796 if (const_p)
3797 rreversep = lreversep;
3798 else
3800 /* If this is not a constant, we can only do something if bit positions,
3801 sizes, signedness and storage order are the same. */
3802 rinner
3803 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3804 &runsignedp, &rreversep, &rvolatilep, false);
3806 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3807 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3808 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3809 return 0;
3812 /* See if we can find a mode to refer to this field. We should be able to,
3813 but fail if we can't. */
3814 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3815 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3816 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3817 TYPE_ALIGN (TREE_TYPE (rinner))),
3818 word_mode, false);
3819 if (nmode == VOIDmode)
3820 return 0;
3822 /* Set signed and unsigned types of the precision of this mode for the
3823 shifts below. */
3824 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3826 /* Compute the bit position and size for the new reference and our offset
3827 within it. If the new reference is the same size as the original, we
3828 won't optimize anything, so return zero. */
3829 nbitsize = GET_MODE_BITSIZE (nmode);
3830 nbitpos = lbitpos & ~ (nbitsize - 1);
3831 lbitpos -= nbitpos;
3832 if (nbitsize == lbitsize)
3833 return 0;
3835 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3836 lbitpos = nbitsize - lbitsize - lbitpos;
3838 /* Make the mask to be used against the extracted field. */
3839 mask = build_int_cst_type (unsigned_type, -1);
3840 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3841 mask = const_binop (RSHIFT_EXPR, mask,
3842 size_int (nbitsize - lbitsize - lbitpos));
3844 if (! const_p)
3845 /* If not comparing with a constant, just rework the comparison
3846 and return. */
3847 return fold_build2_loc (loc, code, compare_type,
3848 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3849 make_bit_field_ref (loc, linner,
3850 unsigned_type,
3851 nbitsize, nbitpos,
3852 1, lreversep),
3853 mask),
3854 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3855 make_bit_field_ref (loc, rinner,
3856 unsigned_type,
3857 nbitsize, nbitpos,
3858 1, rreversep),
3859 mask));
3861 /* Otherwise, we are handling the constant case. See if the constant is too
3862 big for the field. Warn and return a constant result if so. We do
3863 this not only for its own sake, but to avoid having to test for this
3864 error case below. If we didn't, we might generate wrong code.
3866 For unsigned fields, the constant shifted right by the field length should
3867 be all zero. For signed fields, the high-order bits should agree with
3868 the sign bit. */
3870 if (lunsignedp)
3872 if (wi::lrshift (rhs, lbitsize) != 0)
3874 warning (0, "comparison is always %d due to width of bit-field",
3875 code == NE_EXPR);
3876 return constant_boolean_node (code == NE_EXPR, compare_type);
3879 else
3881 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3882 if (tem != 0 && tem != -1)
3884 warning (0, "comparison is always %d due to width of bit-field",
3885 code == NE_EXPR);
3886 return constant_boolean_node (code == NE_EXPR, compare_type);
3890 /* Single-bit compares should always be against zero. */
3891 if (lbitsize == 1 && ! integer_zerop (rhs))
3893 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3894 rhs = build_int_cst (type, 0);
3897 /* Make a new bitfield reference, shift the constant over the
3898 appropriate number of bits and mask it with the computed mask
3899 (in case this was a signed field). If we changed it, make a new one. */
3900 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
3901 lreversep);
3903 rhs = const_binop (BIT_AND_EXPR,
3904 const_binop (LSHIFT_EXPR,
3905 fold_convert_loc (loc, unsigned_type, rhs),
3906 size_int (lbitpos)),
3907 mask);
3909 lhs = build2_loc (loc, code, compare_type,
3910 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3911 return lhs;
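
/* Illustration (editor's standalone sketch, not part of this file): the
   effect of optimize_bit_field_compare for the constant case, using a
   hypothetical 3-bit field at bit offset 4 of a byte.  The naive form
   shifts and masks to extract the field; the folded form masks the
   containing byte and compares it against the constant pre-shifted into
   place, avoiding the shift.  */

#include <assert.h>

static int field_eq_naive (unsigned char word)
{
  return ((word >> 4) & 0x7) == 5;          /* extract, then compare */
}

static int field_eq_folded (unsigned char word)
{
  return (word & (0x7u << 4)) == (5u << 4); /* mask, compare shifted constant */
}

static void check_field_eq (void)
{
  for (unsigned w = 0; w < 256; w++)
    assert (field_eq_naive ((unsigned char) w)
	    == field_eq_folded ((unsigned char) w));
}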
3914 /* Subroutine for fold_truth_andor_1: decode a field reference.
3916 If EXP is a comparison reference, we return the innermost reference.
3918 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3919 set to the starting bit number.
3921 If the innermost field can be completely contained in a mode-sized
3922 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3924 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3925 otherwise it is not changed.
3927 *PUNSIGNEDP is set to the signedness of the field.
3929 *PREVERSEP is set to the storage order of the field.
3931 *PMASK is set to the mask used. This is either contained in a
3932 BIT_AND_EXPR or derived from the width of the field.
3934 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3936 Return 0 if this is not a component reference or is one that we can't
3937 do anything with. */
3939 static tree
3940 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3941 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3942 int *punsignedp, int *preversep, int *pvolatilep,
3943 tree *pmask, tree *pand_mask)
3945 tree outer_type = 0;
3946 tree and_mask = 0;
3947 tree mask, inner, offset;
3948 tree unsigned_type;
3949 unsigned int precision;
3951 /* All the optimizations using this function assume integer fields.
3952 There are problems with FP fields since the type_for_size call
3953 below can fail for, e.g., XFmode. */
3954 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3955 return 0;
3957 /* We are interested in the bare arrangement of bits, so strip everything
3958 that doesn't affect the machine mode. However, record the type of the
3959 outermost expression if it may matter below. */
3960 if (CONVERT_EXPR_P (exp)
3961 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3962 outer_type = TREE_TYPE (exp);
3963 STRIP_NOPS (exp);
3965 if (TREE_CODE (exp) == BIT_AND_EXPR)
3967 and_mask = TREE_OPERAND (exp, 1);
3968 exp = TREE_OPERAND (exp, 0);
3969 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3970 if (TREE_CODE (and_mask) != INTEGER_CST)
3971 return 0;
3974 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3975 punsignedp, preversep, pvolatilep, false);
3976 if ((inner == exp && and_mask == 0)
3977 || *pbitsize < 0 || offset != 0
3978 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3979 return 0;
3981 /* If the number of bits in the reference is the same as the bitsize of
3982 the outer type, then the outer type gives the signedness. Otherwise
3983 (in case of a small bitfield) the signedness is unchanged. */
3984 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3985 *punsignedp = TYPE_UNSIGNED (outer_type);
3987 /* Compute the mask to access the bitfield. */
3988 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3989 precision = TYPE_PRECISION (unsigned_type);
3991 mask = build_int_cst_type (unsigned_type, -1);
3993 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3994 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3996 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3997 if (and_mask != 0)
3998 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3999 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4001 *pmask = mask;
4002 *pand_mask = and_mask;
4003 return inner;
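
/* Illustration (editor's standalone sketch): the mask construction used by
   decode_field_reference above, specialized to a 32-bit unsigned type.
   Shifting all-ones left and then logically right by precision - bitsize
   leaves exactly BITSIZE low-order ones; a mask found in a BIT_AND_EXPR,
   if any, is then folded in with a bitwise AND.  */

#include <assert.h>
#include <stdint.h>

static uint32_t field_mask (unsigned bitsize, uint32_t and_mask)
{
  uint32_t mask = UINT32_MAX;  /* like build_int_cst_type (unsigned_type, -1) */
  mask <<= 32 - bitsize;       /* assumes 1 <= bitsize <= 32 */
  mask >>= 32 - bitsize;
  return and_mask ? (mask & and_mask) : mask;
}

static void check_field_mask (void)
{
  assert (field_mask (3, 0) == 0x7u);
  assert (field_mask (8, 0x55) == 0x55u);  /* BIT_AND_EXPR mask merged in */
}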
4006 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4007 bit positions and MASK is SIGNED. */
4009 static int
4010 all_ones_mask_p (const_tree mask, unsigned int size)
4012 tree type = TREE_TYPE (mask);
4013 unsigned int precision = TYPE_PRECISION (type);
4015 /* If this function returns true when the type of the mask is
4016 UNSIGNED, then there will be errors. In particular see
4017 gcc.c-torture/execute/990326-1.c. There does not appear to be
4018 any documentation paper trail as to why this is so. But the pre
4019 wide-int code worked with that restriction and it has been preserved
4020 here. */
4021 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4022 return false;
4024 return wi::mask (size, false, precision) == mask;
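
/* Illustration (editor's standalone sketch): the wi::mask comparison above
   simply asks whether MASK is exactly SIZE low-order ones (the signedness
   restriction is checked separately).  */

#include <stdint.h>

static int is_low_ones_mask (uint64_t mask, unsigned size)  /* size <= 63 */
{
  return mask == (((uint64_t) 1 << size) - 1);
}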
4027 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4028 represents the sign bit of EXP's type. If EXP represents a sign
4029 or zero extension, also test VAL against the unextended type.
4030 The return value is the (sub)expression whose sign bit is VAL,
4031 or NULL_TREE otherwise. */
4033 tree
4034 sign_bit_p (tree exp, const_tree val)
4036 int width;
4037 tree t;
4039 /* Tree EXP must have an integral type. */
4040 t = TREE_TYPE (exp);
4041 if (! INTEGRAL_TYPE_P (t))
4042 return NULL_TREE;
4044 /* Tree VAL must be an integer constant. */
4045 if (TREE_CODE (val) != INTEGER_CST
4046 || TREE_OVERFLOW (val))
4047 return NULL_TREE;
4049 width = TYPE_PRECISION (t);
4050 if (wi::only_sign_bit_p (val, width))
4051 return exp;
4053 /* Handle extension from a narrower type. */
4054 if (TREE_CODE (exp) == NOP_EXPR
4055 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4056 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4058 return NULL_TREE;
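
/* Illustration (editor's standalone sketch): the only_sign_bit_p test above
   for a WIDTH-bit value -- VAL must have the sign bit set and no other.  */

#include <stdint.h>

static int only_sign_bit (uint64_t val, unsigned width)  /* 1 <= width <= 64 */
{
  return val == (uint64_t) 1 << (width - 1);
}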
4061 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4062 to be evaluated unconditionally. */
4064 static int
4065 simple_operand_p (const_tree exp)
4067 /* Strip any conversions that don't change the machine mode. */
4068 STRIP_NOPS (exp);
4070 return (CONSTANT_CLASS_P (exp)
4071 || TREE_CODE (exp) == SSA_NAME
4072 || (DECL_P (exp)
4073 && ! TREE_ADDRESSABLE (exp)
4074 && ! TREE_THIS_VOLATILE (exp)
4075 && ! DECL_NONLOCAL (exp)
4076 /* Don't regard global variables as simple. They may be
4077 allocated in ways unknown to the compiler (shared memory,
4078 #pragma weak, etc). */
4079 && ! TREE_PUBLIC (exp)
4080 && ! DECL_EXTERNAL (exp)
4081 /* Weakrefs are not safe to be read, since they can be NULL.
4082 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4083 have DECL_WEAK flag set. */
4084 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4085 /* Loading a static variable is unduly expensive, but global
4086 registers aren't expensive. */
4087 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4090 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4091 to be evaluated unconditionally.
4092 In addition to simple_operand_p, we assume that comparisons, conversions,
4093 and logic-not operations are simple, if their operands are simple, too. */
4095 static bool
4096 simple_operand_p_2 (tree exp)
4098 enum tree_code code;
4100 if (TREE_SIDE_EFFECTS (exp)
4101 || tree_could_trap_p (exp))
4102 return false;
4104 while (CONVERT_EXPR_P (exp))
4105 exp = TREE_OPERAND (exp, 0);
4107 code = TREE_CODE (exp);
4109 if (TREE_CODE_CLASS (code) == tcc_comparison)
4110 return (simple_operand_p (TREE_OPERAND (exp, 0))
4111 && simple_operand_p (TREE_OPERAND (exp, 1)));
4113 if (code == TRUTH_NOT_EXPR)
4114 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4116 return simple_operand_p (exp);
4120 /* The following functions are subroutines to fold_range_test and allow it to
4121 try to change a logical combination of comparisons into a range test.
4123 For example, both
4124 X == 2 || X == 3 || X == 4 || X == 5
4125 and
4126 X >= 2 && X <= 5
4127 are converted to
4128 (unsigned) (X - 2) <= 3
4130 We describe each set of comparisons as being either inside or outside
4131 a range, using a variable named like IN_P, and then describe the
4132 range with a lower and upper bound. If one of the bounds is omitted,
4133 it represents either the highest or lowest value of the type.
4135 In the comments below, we represent a range by two numbers in brackets
4136 preceded by a "+" to designate being inside that range, or a "-" to
4137 designate being outside that range, so the condition can be inverted by
4138 flipping the prefix. An omitted bound is represented by a "-". For
4139 example, "- [-, 10]" means being outside the range starting at the lowest
4140 possible value and ending at 10, in other words, being greater than 10.
4141 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4142 always false.
4144 We set up things so that the missing bounds are handled in a consistent
4145 manner so neither a missing bound nor "true" and "false" need to be
4146 handled using a special case. */
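
/* Illustration (editor's standalone sketch): the rewrite described in the
   comment above.  Both forms denote the range + [2, 5], which a single
   unsigned subtract-and-compare tests in one branch; values below 2 wrap
   to large unsigned values and fail the comparison.  */

#include <assert.h>

static int in_range_chain (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int in_range_folded (int x)
{
  return (unsigned) (x - 2) <= 3;
}

static void check_range_fold (void)
{
  for (int x = -1000; x <= 1000; x++)
    assert (in_range_chain (x) == in_range_folded (x));
}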
4148 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4149 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4150 and UPPER1_P are nonzero if the respective argument is an upper bound
4151 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4152 must be specified for a comparison. ARG1 will be converted to ARG0's
4153 type if both are specified. */
4155 static tree
4156 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4157 tree arg1, int upper1_p)
4159 tree tem;
4160 int result;
4161 int sgn0, sgn1;
4163 /* If neither arg represents infinity, do the normal operation.
4164 Else, if not a comparison, return infinity. Else handle the special
4165 comparison rules. Note that most of the cases below won't occur, but
4166 are handled for consistency. */
4168 if (arg0 != 0 && arg1 != 0)
4170 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4171 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4172 STRIP_NOPS (tem);
4173 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4176 if (TREE_CODE_CLASS (code) != tcc_comparison)
4177 return 0;
4179 /* Set SGN[01] to -1 if ARG[01] is an omitted lower bound, 1 for an
4180 omitted upper bound, and 0 if the bound is present. In real
4181 mathematics, we cannot assume open-ended ranges are the same. But this
4182 is computer arithmetic, where numbers are finite. We can therefore
4183 model any missing bound with a value Z beyond any representable number,
4184 which permits us to treat all unbounded ranges as equal. */
4185 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4186 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4187 switch (code)
4189 case EQ_EXPR:
4190 result = sgn0 == sgn1;
4191 break;
4192 case NE_EXPR:
4193 result = sgn0 != sgn1;
4194 break;
4195 case LT_EXPR:
4196 result = sgn0 < sgn1;
4197 break;
4198 case LE_EXPR:
4199 result = sgn0 <= sgn1;
4200 break;
4201 case GT_EXPR:
4202 result = sgn0 > sgn1;
4203 break;
4204 case GE_EXPR:
4205 result = sgn0 >= sgn1;
4206 break;
4207 default:
4208 gcc_unreachable ();
4211 return constant_boolean_node (result, type);
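
/* Illustration (editor's standalone sketch): the SGN convention above.  A
   missing bound is modelled as a value beyond every finite one, so bound
   comparisons need no special cases; two missing bounds on the same side
   compare equal.  The finite/finite case is shown here for completeness,
   although range_binop itself folds that case directly.  */

static int bound_lt (int sgn0, long v0, int sgn1, long v1)
{
  if (sgn0 != sgn1)
    return sgn0 < sgn1;           /* -inf < any finite value < +inf */
  return sgn0 == 0 && v0 < v1;    /* like infinities are equal */
}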
4214 /* Helper routine for make_range. Perform one step for it, return
4215 new expression if the loop should continue or NULL_TREE if it should
4216 stop. */
4218 tree
4219 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4220 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4221 bool *strict_overflow_p)
4223 tree arg0_type = TREE_TYPE (arg0);
4224 tree n_low, n_high, low = *p_low, high = *p_high;
4225 int in_p = *p_in_p, n_in_p;
4227 switch (code)
4229 case TRUTH_NOT_EXPR:
4230 /* We can only do something if the range is testing for zero. */
4231 if (low == NULL_TREE || high == NULL_TREE
4232 || ! integer_zerop (low) || ! integer_zerop (high))
4233 return NULL_TREE;
4234 *p_in_p = ! in_p;
4235 return arg0;
4237 case EQ_EXPR: case NE_EXPR:
4238 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4239 /* We can only do something if the range is testing for zero
4240 and if the second operand is an integer constant. Note that
4241 saying something is "in" the range we make is done by
4242 complementing IN_P, since it is set for the initial case of
4243 being not equal to zero; "out" is leaving it alone. */
4244 if (low == NULL_TREE || high == NULL_TREE
4245 || ! integer_zerop (low) || ! integer_zerop (high)
4246 || TREE_CODE (arg1) != INTEGER_CST)
4247 return NULL_TREE;
4249 switch (code)
4251 case NE_EXPR: /* - [c, c] */
4252 low = high = arg1;
4253 break;
4254 case EQ_EXPR: /* + [c, c] */
4255 in_p = ! in_p, low = high = arg1;
4256 break;
4257 case GT_EXPR: /* - [-, c] */
4258 low = 0, high = arg1;
4259 break;
4260 case GE_EXPR: /* + [c, -] */
4261 in_p = ! in_p, low = arg1, high = 0;
4262 break;
4263 case LT_EXPR: /* - [c, -] */
4264 low = arg1, high = 0;
4265 break;
4266 case LE_EXPR: /* + [-, c] */
4267 in_p = ! in_p, low = 0, high = arg1;
4268 break;
4269 default:
4270 gcc_unreachable ();
4273 /* If this is an unsigned comparison, we also know that EXP is
4274 greater than or equal to zero. We base the range tests we make
4275 on that fact, so we record it here so we can parse existing
4276 range tests. We test arg0_type since often the return type
4277 of, e.g. EQ_EXPR, is boolean. */
4278 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4280 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4281 in_p, low, high, 1,
4282 build_int_cst (arg0_type, 0),
4283 NULL_TREE))
4284 return NULL_TREE;
4286 in_p = n_in_p, low = n_low, high = n_high;
4288 /* If the high bound is missing, but we have a nonzero low
4289 bound, reverse the range so it goes from zero to the low bound
4290 minus 1. */
4291 if (high == 0 && low && ! integer_zerop (low))
4293 in_p = ! in_p;
4294 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4295 build_int_cst (TREE_TYPE (low), 1), 0);
4296 low = build_int_cst (arg0_type, 0);
4300 *p_low = low;
4301 *p_high = high;
4302 *p_in_p = in_p;
4303 return arg0;
4305 case NEGATE_EXPR:
4306 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4307 low and high are non-NULL, then normalize will DTRT. */
4308 if (!TYPE_UNSIGNED (arg0_type)
4309 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4311 if (low == NULL_TREE)
4312 low = TYPE_MIN_VALUE (arg0_type);
4313 if (high == NULL_TREE)
4314 high = TYPE_MAX_VALUE (arg0_type);
4317 /* (-x) IN [a,b] -> x in [-b, -a] */
4318 n_low = range_binop (MINUS_EXPR, exp_type,
4319 build_int_cst (exp_type, 0),
4320 0, high, 1);
4321 n_high = range_binop (MINUS_EXPR, exp_type,
4322 build_int_cst (exp_type, 0),
4323 0, low, 0);
4324 if (n_high != 0 && TREE_OVERFLOW (n_high))
4325 return NULL_TREE;
4326 goto normalize;
4328 case BIT_NOT_EXPR:
4329 /* ~ X -> -X - 1 */
4330 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4331 build_int_cst (exp_type, 1));
4333 case PLUS_EXPR:
4334 case MINUS_EXPR:
4335 if (TREE_CODE (arg1) != INTEGER_CST)
4336 return NULL_TREE;
4338 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4339 move a constant to the other side. */
4340 if (!TYPE_UNSIGNED (arg0_type)
4341 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4342 return NULL_TREE;
4344 /* If EXP is signed, any overflow in the computation is undefined,
4345 so we don't worry about it so long as our computations on
4346 the bounds don't overflow. For unsigned, overflow is defined
4347 and this is exactly the right thing. */
4348 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4349 arg0_type, low, 0, arg1, 0);
4350 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4351 arg0_type, high, 1, arg1, 0);
4352 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4353 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4354 return NULL_TREE;
4356 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4357 *strict_overflow_p = true;
4359 normalize:
4360 /* Check for an unsigned range which has wrapped around the maximum
4361 value thus making n_high < n_low, and normalize it. */
4362 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4364 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4365 build_int_cst (TREE_TYPE (n_high), 1), 0);
4366 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4367 build_int_cst (TREE_TYPE (n_low), 1), 0);
4369 /* If the range is of the form +/- [ x+1, x ], we won't
4370 be able to normalize it. But then, it represents the
4371 whole range or the empty set, so make it
4372 +/- [ -, - ]. */
4373 if (tree_int_cst_equal (n_low, low)
4374 && tree_int_cst_equal (n_high, high))
4375 low = high = 0;
4376 else
4377 in_p = ! in_p;
4379 else
4380 low = n_low, high = n_high;
4382 *p_low = low;
4383 *p_high = high;
4384 *p_in_p = in_p;
4385 return arg0;
4387 CASE_CONVERT:
4388 case NON_LVALUE_EXPR:
4389 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4390 return NULL_TREE;
4392 if (! INTEGRAL_TYPE_P (arg0_type)
4393 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4394 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4395 return NULL_TREE;
4397 n_low = low, n_high = high;
4399 if (n_low != 0)
4400 n_low = fold_convert_loc (loc, arg0_type, n_low);
4402 if (n_high != 0)
4403 n_high = fold_convert_loc (loc, arg0_type, n_high);
4405 /* If we're converting arg0 from an unsigned type to EXP's
4406 signed type, we will be doing the comparison as unsigned.
4407 The tests above have already verified that LOW and HIGH
4408 are both positive.
4410 So we have to ensure that we will handle large unsigned
4411 values the same way that the current signed bounds treat
4412 negative values. */
4414 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4416 tree high_positive;
4417 tree equiv_type;
4418 /* For fixed-point modes, we need to pass the saturating flag
4419 as the 2nd parameter. */
4420 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4421 equiv_type
4422 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4423 TYPE_SATURATING (arg0_type));
4424 else
4425 equiv_type
4426 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4428 /* A range without an upper bound is, naturally, unbounded.
4429 Since convert would have cropped a very large value, use
4430 the max value for the destination type. */
4431 high_positive
4432 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4433 : TYPE_MAX_VALUE (arg0_type);
4435 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4436 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4437 fold_convert_loc (loc, arg0_type,
4438 high_positive),
4439 build_int_cst (arg0_type, 1));
4441 /* If the low bound is specified, "and" the range with the
4442 range for which the original unsigned value will be
4443 positive. */
4444 if (low != 0)
4446 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4447 1, fold_convert_loc (loc, arg0_type,
4448 integer_zero_node),
4449 high_positive))
4450 return NULL_TREE;
4452 in_p = (n_in_p == in_p);
4454 else
4456 /* Otherwise, "or" the range with the range of the input
4457 that will be interpreted as negative. */
4458 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4459 1, fold_convert_loc (loc, arg0_type,
4460 integer_zero_node),
4461 high_positive))
4462 return NULL_TREE;
4464 in_p = (in_p != n_in_p);
4468 *p_low = n_low;
4469 *p_high = n_high;
4470 *p_in_p = in_p;
4471 return arg0;
4473 default:
4474 return NULL_TREE;
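
/* Illustration (editor's standalone sketch): the PLUS_EXPR and "normalize"
   steps above, on unsigned char.  x + 10 <= 13 puts x + 10 in + [0, 13];
   moving the constant gives x in + [246, 3] (mod 256).  The high bound is
   now below the low bound, so the range has wrapped and is normalized to
   the complementary exclusion - [4, 245].  */

#include <assert.h>

static int direct_test (unsigned char x)
{
  return (unsigned char) (x + 10) <= 13;
}

static int normalized_test (unsigned char x)
{
  return !(x >= 4 && x <= 245);   /* - [4, 245] */
}

static void check_normalize (void)
{
  for (unsigned i = 0; i < 256; i++)
    assert (direct_test ((unsigned char) i)
	    == normalized_test ((unsigned char) i));
}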
4478 /* Given EXP, a logical expression, set the range it is testing into
4479 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4480 actually being tested. *PLOW and *PHIGH will be made of the same
4481 type as the returned expression. If EXP is not a comparison, we
4482 will most likely not be returning a useful value and range. Set
4483 *STRICT_OVERFLOW_P to true if the return value is only valid
4484 because signed overflow is undefined; otherwise, do not change
4485 *STRICT_OVERFLOW_P. */
4487 tree
4488 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4489 bool *strict_overflow_p)
4491 enum tree_code code;
4492 tree arg0, arg1 = NULL_TREE;
4493 tree exp_type, nexp;
4494 int in_p;
4495 tree low, high;
4496 location_t loc = EXPR_LOCATION (exp);
4498 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4499 and see if we can refine the range. Some of the cases below may not
4500 happen, but it doesn't seem worth worrying about this. We "continue"
4501 the outer loop when we've changed something; otherwise we "break"
4502 the switch, which will "break" the while. */
4504 in_p = 0;
4505 low = high = build_int_cst (TREE_TYPE (exp), 0);
4507 while (1)
4509 code = TREE_CODE (exp);
4510 exp_type = TREE_TYPE (exp);
4511 arg0 = NULL_TREE;
4513 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4515 if (TREE_OPERAND_LENGTH (exp) > 0)
4516 arg0 = TREE_OPERAND (exp, 0);
4517 if (TREE_CODE_CLASS (code) == tcc_binary
4518 || TREE_CODE_CLASS (code) == tcc_comparison
4519 || (TREE_CODE_CLASS (code) == tcc_expression
4520 && TREE_OPERAND_LENGTH (exp) > 1))
4521 arg1 = TREE_OPERAND (exp, 1);
4523 if (arg0 == NULL_TREE)
4524 break;
4526 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4527 &high, &in_p, strict_overflow_p);
4528 if (nexp == NULL_TREE)
4529 break;
4530 exp = nexp;
4533 /* If EXP is a constant, we can evaluate whether this is true or false. */
4534 if (TREE_CODE (exp) == INTEGER_CST)
4536 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4537 exp, 0, low, 0))
4538 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4539 exp, 1, high, 1)));
4540 low = high = 0;
4541 exp = 0;
4544 *pin_p = in_p, *plow = low, *phigh = high;
4545 return exp;
4548 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4549 type, TYPE, return an expression to test if EXP is in (or out of, depending
4550 on IN_P) the range. Return 0 if the test couldn't be created. */
4552 tree
4553 build_range_check (location_t loc, tree type, tree exp, int in_p,
4554 tree low, tree high)
4556 tree etype = TREE_TYPE (exp), value;
4558 /* Disable this optimization for function pointer expressions
4559 on targets that require function pointer canonicalization. */
4560 if (targetm.have_canonicalize_funcptr_for_compare ()
4561 && TREE_CODE (etype) == POINTER_TYPE
4562 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4563 return NULL_TREE;
4565 if (! in_p)
4567 value = build_range_check (loc, type, exp, 1, low, high);
4568 if (value != 0)
4569 return invert_truthvalue_loc (loc, value);
4571 return 0;
4574 if (low == 0 && high == 0)
4575 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4577 if (low == 0)
4578 return fold_build2_loc (loc, LE_EXPR, type, exp,
4579 fold_convert_loc (loc, etype, high));
4581 if (high == 0)
4582 return fold_build2_loc (loc, GE_EXPR, type, exp,
4583 fold_convert_loc (loc, etype, low));
4585 if (operand_equal_p (low, high, 0))
4586 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4587 fold_convert_loc (loc, etype, low));
4589 if (integer_zerop (low))
4591 if (! TYPE_UNSIGNED (etype))
4593 etype = unsigned_type_for (etype);
4594 high = fold_convert_loc (loc, etype, high);
4595 exp = fold_convert_loc (loc, etype, exp);
4597 return build_range_check (loc, type, exp, 1, 0, high);
4600 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4601 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4603 int prec = TYPE_PRECISION (etype);
4605 if (wi::mask (prec - 1, false, prec) == high)
4607 if (TYPE_UNSIGNED (etype))
4609 tree signed_etype = signed_type_for (etype);
4610 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4611 etype
4612 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4613 else
4614 etype = signed_etype;
4615 exp = fold_convert_loc (loc, etype, exp);
4617 return fold_build2_loc (loc, GT_EXPR, type, exp,
4618 build_int_cst (etype, 0));
4622 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4623 This requires wrap-around arithmetic for the type of the expression.
4624 First make sure that arithmetic in this type is valid, then make sure
4625 that it wraps around.
4626 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4627 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4628 TYPE_UNSIGNED (etype));
4630 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4632 tree utype, minv, maxv;
4634 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4635 for the type in question, as we rely on this here. */
4636 utype = unsigned_type_for (etype);
4637 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4638 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4639 build_int_cst (TREE_TYPE (maxv), 1), 1);
4640 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4642 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4643 minv, 1, maxv, 1)))
4644 etype = utype;
4645 else
4646 return 0;
4649 high = fold_convert_loc (loc, etype, high);
4650 low = fold_convert_loc (loc, etype, low);
4651 exp = fold_convert_loc (loc, etype, exp);
4653 value = const_binop (MINUS_EXPR, high, low);
4656 if (POINTER_TYPE_P (etype))
4658 if (value != 0 && !TREE_OVERFLOW (value))
4660 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4661 return build_range_check (loc, type,
4662 fold_build_pointer_plus_loc (loc, exp, low),
4663 1, build_int_cst (etype, 0), value);
4665 return 0;
4668 if (value != 0 && !TREE_OVERFLOW (value))
4669 return build_range_check (loc, type,
4670 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4671 1, build_int_cst (etype, 0), value);
4673 return 0;
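
/* Illustration (editor's standalone sketch): two of the rewrites above.  A
   zero low bound on a signed expression becomes a single unsigned compare,
   and c >= 1 && c <= 127 on an unsigned char becomes a signed compare
   against zero (assuming the usual two's-complement conversion to signed
   char).  */

#include <assert.h>

static int in_0_9_naive (int c)   { return c >= 0 && c <= 9; }
static int in_0_9_folded (int c)  { return (unsigned) c <= 9; }

static int pos7_naive (unsigned char c)  { return c >= 1 && c <= 127; }
static int pos7_folded (unsigned char c) { return (signed char) c > 0; }

static void check_range_checks (void)
{
  for (int c = -1000; c <= 1000; c++)
    assert (in_0_9_naive (c) == in_0_9_folded (c));
  for (int c = 0; c < 256; c++)
    assert (pos7_naive ((unsigned char) c) == pos7_folded ((unsigned char) c));
}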
4676 /* Return the predecessor of VAL in its type, handling the infinite case. */
4678 static tree
4679 range_predecessor (tree val)
4681 tree type = TREE_TYPE (val);
4683 if (INTEGRAL_TYPE_P (type)
4684 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4685 return 0;
4686 else
4687 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4688 build_int_cst (TREE_TYPE (val), 1), 0);
4691 /* Return the successor of VAL in its type, handling the infinite case. */
4693 static tree
4694 range_successor (tree val)
4696 tree type = TREE_TYPE (val);
4698 if (INTEGRAL_TYPE_P (type)
4699 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4700 return 0;
4701 else
4702 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4703 build_int_cst (TREE_TYPE (val), 1), 0);
4706 /* Given two ranges, see if we can merge them into one. Return 1 if we
4707 can, 0 if we can't. Set the output range into the specified parameters. */
4709 bool
4710 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4711 tree high0, int in1_p, tree low1, tree high1)
4713 int no_overlap;
4714 int subset;
4715 int temp;
4716 tree tem;
4717 int in_p;
4718 tree low, high;
4719 int lowequal = ((low0 == 0 && low1 == 0)
4720 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4721 low0, 0, low1, 0)));
4722 int highequal = ((high0 == 0 && high1 == 0)
4723 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4724 high0, 1, high1, 1)));
4726 /* Make range 0 be the range that starts first, or ends last if they
4727 start at the same value. Swap them if necessary. */
4728 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4729 low0, 0, low1, 0))
4730 || (lowequal
4731 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4732 high1, 1, high0, 1))))
4734 temp = in0_p, in0_p = in1_p, in1_p = temp;
4735 tem = low0, low0 = low1, low1 = tem;
4736 tem = high0, high0 = high1, high1 = tem;
4739 /* Now flag two cases, whether the ranges are disjoint or whether the
4740 second range is totally subsumed in the first. Note that the tests
4741 below are simplified by the ones above. */
4742 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4743 high0, 1, low1, 0));
4744 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4745 high1, 1, high0, 1));
4747 /* We now have four cases, depending on whether we are including or
4748 excluding the two ranges. */
4749 if (in0_p && in1_p)
4751 /* If they don't overlap, the result is false. If the second range
4752 is a subset it is the result. Otherwise, the range is from the start
4753 of the second to the end of the first. */
4754 if (no_overlap)
4755 in_p = 0, low = high = 0;
4756 else if (subset)
4757 in_p = 1, low = low1, high = high1;
4758 else
4759 in_p = 1, low = low1, high = high0;
4762 else if (in0_p && ! in1_p)
4764 /* If they don't overlap, the result is the first range. If they are
4765 equal, the result is false. If the second range is a subset of the
4766 first, and the ranges begin at the same place, we go from just after
4767 the end of the second range to the end of the first. If the second
4768 range is not a subset of the first, or if it is a subset and both
4769 ranges end at the same place, the range starts at the start of the
4770 first range and ends just before the second range.
4771 Otherwise, we can't describe this as a single range. */
4772 if (no_overlap)
4773 in_p = 1, low = low0, high = high0;
4774 else if (lowequal && highequal)
4775 in_p = 0, low = high = 0;
4776 else if (subset && lowequal)
4778 low = range_successor (high1);
4779 high = high0;
4780 in_p = 1;
4781 if (low == 0)
4783 /* We are in the weird situation where high0 > high1 but
4784 high1 has no successor. Punt. */
4785 return 0;
4788 else if (! subset || highequal)
4790 low = low0;
4791 high = range_predecessor (low1);
4792 in_p = 1;
4793 if (high == 0)
4795 /* low0 < low1 but low1 has no predecessor. Punt. */
4796 return 0;
4799 else
4800 return 0;
4803 else if (! in0_p && in1_p)
4805 /* If they don't overlap, the result is the second range. If the second
4806 is a subset of the first, the result is false. Otherwise,
4807 the range starts just after the first range and ends at the
4808 end of the second. */
4809 if (no_overlap)
4810 in_p = 1, low = low1, high = high1;
4811 else if (subset || highequal)
4812 in_p = 0, low = high = 0;
4813 else
4815 low = range_successor (high0);
4816 high = high1;
4817 in_p = 1;
4818 if (low == 0)
4820 /* high1 > high0 but high0 has no successor. Punt. */
4821 return 0;
4826 else
4828 /* The case where we are excluding both ranges. Here the complex case
4829 is if they don't overlap. In that case, the only time we have a
4830 range is if they are adjacent. If the second is a subset of the
4831 first, the result is the first. Otherwise, the range to exclude
4832 starts at the beginning of the first range and ends at the end of the
4833 second. */
4834 if (no_overlap)
4836 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4837 range_successor (high0),
4838 1, low1, 0)))
4839 in_p = 0, low = low0, high = high1;
4840 else
4842 /* Canonicalize - [min, x] into - [-, x]. */
4843 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4844 switch (TREE_CODE (TREE_TYPE (low0)))
4846 case ENUMERAL_TYPE:
4847 if (TYPE_PRECISION (TREE_TYPE (low0))
4848 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4849 break;
4850 /* FALLTHROUGH */
4851 case INTEGER_TYPE:
4852 if (tree_int_cst_equal (low0,
4853 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4854 low0 = 0;
4855 break;
4856 case POINTER_TYPE:
4857 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4858 && integer_zerop (low0))
4859 low0 = 0;
4860 break;
4861 default:
4862 break;
4865 /* Canonicalize - [x, max] into - [x, -]. */
4866 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4867 switch (TREE_CODE (TREE_TYPE (high1)))
4869 case ENUMERAL_TYPE:
4870 if (TYPE_PRECISION (TREE_TYPE (high1))
4871 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4872 break;
4873 /* FALLTHROUGH */
4874 case INTEGER_TYPE:
4875 if (tree_int_cst_equal (high1,
4876 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4877 high1 = 0;
4878 break;
4879 case POINTER_TYPE:
4880 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4881 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4882 high1, 1,
4883 build_int_cst (TREE_TYPE (high1), 1),
4884 1)))
4885 high1 = 0;
4886 break;
4887 default:
4888 break;
4891 /* The ranges might also be adjacent between the maximum and
4892 minimum values of the given type. For
4893 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4894 return + [x + 1, y - 1]. */
4895 if (low0 == 0 && high1 == 0)
4897 low = range_successor (high0);
4898 high = range_predecessor (low1);
4899 if (low == 0 || high == 0)
4900 return 0;
4902 in_p = 1;
4904 else
4905 return 0;
4908 else if (subset)
4909 in_p = 0, low = low0, high = high0;
4910 else
4911 in_p = 0, low = low0, high = high1;
4914 *pin_p = in_p, *plow = low, *phigh = high;
4915 return 1;
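
/* Illustration (editor's standalone sketch): the "exclude both" case of
   merge_ranges above.  x != 3 && x != 4 excludes - [3, 3] and - [4, 4];
   the ranges are adjacent, so they merge into the single exclusion
   - [3, 4], testable with one unsigned compare.  */

#include <assert.h>

static int excl_naive (int x)  { return x != 3 && x != 4; }
static int excl_merged (int x) { return (unsigned) (x - 3) > 1; }

static void check_merge (void)
{
  for (int x = -100; x <= 100; x++)
    assert (excl_naive (x) == excl_merged (x));
}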
4919 /* Subroutine of fold, looking inside expressions of the form
4920 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4921 of the COND_EXPR. This function is also used to optimize
4922 A op B ? C : A, by reversing the comparison first.
4924 Return a folded expression whose code is not a COND_EXPR
4925 anymore, or NULL_TREE if no folding opportunity is found. */
4927 static tree
4928 fold_cond_expr_with_comparison (location_t loc, tree type,
4929 tree arg0, tree arg1, tree arg2)
4931 enum tree_code comp_code = TREE_CODE (arg0);
4932 tree arg00 = TREE_OPERAND (arg0, 0);
4933 tree arg01 = TREE_OPERAND (arg0, 1);
4934 tree arg1_type = TREE_TYPE (arg1);
4935 tree tem;
4937 STRIP_NOPS (arg1);
4938 STRIP_NOPS (arg2);
4940 /* If we have A op 0 ? A : -A, consider applying the following
4941 transformations:
4943 A == 0? A : -A same as -A
4944 A != 0? A : -A same as A
4945 A >= 0? A : -A same as abs (A)
4946 A > 0? A : -A same as abs (A)
4947 A <= 0? A : -A same as -abs (A)
4948 A < 0? A : -A same as -abs (A)
4950 None of these transformations work for modes with signed
4951 zeros. If A is +/-0, the first two transformations will
4952 change the sign of the result (from +0 to -0, or vice
4953 versa). The last four will fix the sign of the result,
4954 even though the original expressions could be positive or
4955 negative, depending on the sign of A.
4957 Note that all these transformations are correct if A is
4958 NaN, since the two alternatives (A and -A) are also NaNs. */
4959 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4960 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4961 ? real_zerop (arg01)
4962 : integer_zerop (arg01))
4963 && ((TREE_CODE (arg2) == NEGATE_EXPR
4964 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4965 /* In the case that A is of the form X-Y, '-A' (arg2) may
4966 have already been folded to Y-X, check for that. */
4967 || (TREE_CODE (arg1) == MINUS_EXPR
4968 && TREE_CODE (arg2) == MINUS_EXPR
4969 && operand_equal_p (TREE_OPERAND (arg1, 0),
4970 TREE_OPERAND (arg2, 1), 0)
4971 && operand_equal_p (TREE_OPERAND (arg1, 1),
4972 TREE_OPERAND (arg2, 0), 0))))
4973 switch (comp_code)
4975 case EQ_EXPR:
4976 case UNEQ_EXPR:
4977 tem = fold_convert_loc (loc, arg1_type, arg1);
4978 return pedantic_non_lvalue_loc (loc,
4979 fold_convert_loc (loc, type,
4980 negate_expr (tem)));
4981 case NE_EXPR:
4982 case LTGT_EXPR:
4983 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4984 case UNGE_EXPR:
4985 case UNGT_EXPR:
4986 if (flag_trapping_math)
4987 break;
4988 /* Fall through. */
4989 case GE_EXPR:
4990 case GT_EXPR:
4991 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4992 break;
4993 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4994 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4995 case UNLE_EXPR:
4996 case UNLT_EXPR:
4997 if (flag_trapping_math)
4998 break;
4999 case LE_EXPR:
5000 case LT_EXPR:
5001 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5002 break;
5003 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5004 return negate_expr (fold_convert_loc (loc, type, tem));
5005 default:
5006 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5007 break;
5010 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5011 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5012 both transformations are correct when A is NaN: A != 0
5013 is then true, and A == 0 is false. */
5015 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5016 && integer_zerop (arg01) && integer_zerop (arg2))
5018 if (comp_code == NE_EXPR)
5019 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5020 else if (comp_code == EQ_EXPR)
5021 return build_zero_cst (type);
5024 /* Try some transformations of A op B ? A : B.
5026 A == B? A : B same as B
5027 A != B? A : B same as A
5028 A >= B? A : B same as max (A, B)
5029 A > B? A : B same as max (B, A)
5030 A <= B? A : B same as min (A, B)
5031 A < B? A : B same as min (B, A)
5033 As above, these transformations don't work in the presence
5034 of signed zeros. For example, if A and B are zeros of
5035 opposite sign, the first two transformations will change
5036 the sign of the result. In the last four, the original
5037 expressions give different results for (A=+0, B=-0) and
5038 (A=-0, B=+0), but the transformed expressions do not.
5040 The first two transformations are correct if either A or B
5041 is a NaN. In the first transformation, the condition will
5042 be false, and B will indeed be chosen. In the case of the
5043 second transformation, the condition A != B will be true,
5044 and A will be chosen.
5046 The conversions to max() and min() are not correct if B is
5047 a number and A is not. The conditions in the original
5048 expressions will be false, so all four give B. The min()
5049 and max() versions would give a NaN instead. */
5050 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5051 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5052 /* Avoid these transformations if the COND_EXPR may be used
5053 as an lvalue in the C++ front-end. PR c++/19199. */
5054 && (in_gimple_form
5055 || VECTOR_TYPE_P (type)
5056 || (! lang_GNU_CXX ()
5057 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5058 || ! maybe_lvalue_p (arg1)
5059 || ! maybe_lvalue_p (arg2)))
5061 tree comp_op0 = arg00;
5062 tree comp_op1 = arg01;
5063 tree comp_type = TREE_TYPE (comp_op0);
5065 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5066 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5068 comp_type = type;
5069 comp_op0 = arg1;
5070 comp_op1 = arg2;
5073 switch (comp_code)
5075 case EQ_EXPR:
5076 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5077 case NE_EXPR:
5078 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5079 case LE_EXPR:
5080 case LT_EXPR:
5081 case UNLE_EXPR:
5082 case UNLT_EXPR:
5083 /* In C++ a ?: expression can be an lvalue, so put the
5084 operand which will be used if they are equal first
5085 so that we can convert this back to the
5086 corresponding COND_EXPR. */
5087 if (!HONOR_NANS (arg1))
5089 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5090 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5091 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5092 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5093 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5094 comp_op1, comp_op0);
5095 return pedantic_non_lvalue_loc (loc,
5096 fold_convert_loc (loc, type, tem));
5098 break;
5099 case GE_EXPR:
5100 case GT_EXPR:
5101 case UNGE_EXPR:
5102 case UNGT_EXPR:
5103 if (!HONOR_NANS (arg1))
5105 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5106 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5107 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5108 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5109 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5110 comp_op1, comp_op0);
5111 return pedantic_non_lvalue_loc (loc,
5112 fold_convert_loc (loc, type, tem));
5114 break;
5115 case UNEQ_EXPR:
5116 if (!HONOR_NANS (arg1))
5117 return pedantic_non_lvalue_loc (loc,
5118 fold_convert_loc (loc, type, arg2));
5119 break;
5120 case LTGT_EXPR:
5121 if (!HONOR_NANS (arg1))
5122 return pedantic_non_lvalue_loc (loc,
5123 fold_convert_loc (loc, type, arg1));
5124 break;
5125 default:
5126 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5127 break;
5131 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5132 we might still be able to simplify this. For example,
5133 if C1 is one less or one more than C2, this might have started
5134 out as a MIN or MAX and been transformed by this function.
5135 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5137 if (INTEGRAL_TYPE_P (type)
5138 && TREE_CODE (arg01) == INTEGER_CST
5139 && TREE_CODE (arg2) == INTEGER_CST)
5140 switch (comp_code)
5142 case EQ_EXPR:
5143 if (TREE_CODE (arg1) == INTEGER_CST)
5144 break;
5145 /* We can replace A with C1 in this case. */
5146 arg1 = fold_convert_loc (loc, type, arg01);
5147 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5149 case LT_EXPR:
5150 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5151 MIN_EXPR, to preserve the signedness of the comparison. */
5152 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5153 OEP_ONLY_CONST)
5154 && operand_equal_p (arg01,
5155 const_binop (PLUS_EXPR, arg2,
5156 build_int_cst (type, 1)),
5157 OEP_ONLY_CONST))
5159 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5160 fold_convert_loc (loc, TREE_TYPE (arg00),
5161 arg2));
5162 return pedantic_non_lvalue_loc (loc,
5163 fold_convert_loc (loc, type, tem));
5165 break;
5167 case LE_EXPR:
5168 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5169 as above. */
5170 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5171 OEP_ONLY_CONST)
5172 && operand_equal_p (arg01,
5173 const_binop (MINUS_EXPR, arg2,
5174 build_int_cst (type, 1)),
5175 OEP_ONLY_CONST))
5177 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5178 fold_convert_loc (loc, TREE_TYPE (arg00),
5179 arg2));
5180 return pedantic_non_lvalue_loc (loc,
5181 fold_convert_loc (loc, type, tem));
5183 break;
5185 case GT_EXPR:
5186 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5187 MAX_EXPR, to preserve the signedness of the comparison. */
5188 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5189 OEP_ONLY_CONST)
5190 && operand_equal_p (arg01,
5191 const_binop (MINUS_EXPR, arg2,
5192 build_int_cst (type, 1)),
5193 OEP_ONLY_CONST))
5195 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5196 fold_convert_loc (loc, TREE_TYPE (arg00),
5197 arg2));
5198 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5200 break;
5202 case GE_EXPR:
5203 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5204 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5205 OEP_ONLY_CONST)
5206 && operand_equal_p (arg01,
5207 const_binop (PLUS_EXPR, arg2,
5208 build_int_cst (type, 1)),
5209 OEP_ONLY_CONST))
5211 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5212 fold_convert_loc (loc, TREE_TYPE (arg00),
5213 arg2));
5214 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5216 break;
5217 case NE_EXPR:
5218 break;
5219 default:
5220 gcc_unreachable ();
5223 return NULL_TREE;
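
/* Illustration (editor's standalone sketch): two of the A op 0 ? A : -A
   rules above, on integers, where signed zeros are not a concern.  */

static int abs_cond (int a) { return a >= 0 ? a : -a; }  /* folds to abs (a) */
static int neg_cond (int a) { return a == 0 ? a : -a; }  /* folds to -a      */

/* In neg_cond both arms agree when a == 0, so the result is always -a.
   For floating point these rewrites are suppressed when signed zeros are
   honoured: with a = -0.0, a == 0.0 ? a : -a yields -0.0 while -a yields
   +0.0, so the fold would change the sign of the result.  */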
5228 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5229 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5230 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5231 false) >= 2)
5232 #endif
5234 /* EXP is some logical combination of boolean tests. See if we can
5235 merge it into some range test. Return the new tree if so. */
5237 static tree
5238 fold_range_test (location_t loc, enum tree_code code, tree type,
5239 tree op0, tree op1)
5241 int or_op = (code == TRUTH_ORIF_EXPR
5242 || code == TRUTH_OR_EXPR);
5243 int in0_p, in1_p, in_p;
5244 tree low0, low1, low, high0, high1, high;
5245 bool strict_overflow_p = false;
5246 tree tem, lhs, rhs;
5247 const char * const warnmsg = G_("assuming signed overflow does not occur "
5248 "when simplifying range test");
5250 if (!INTEGRAL_TYPE_P (type))
5251 return 0;
5253 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5254 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5256 /* If this is an OR operation, invert both sides; we will invert
5257 again at the end. */
5258 if (or_op)
5259 in0_p = ! in0_p, in1_p = ! in1_p;
5261 /* If both expressions are the same, if we can merge the ranges, and we
5262 can build the range test, return it or it inverted. If one of the
5263 ranges is always true or always false, consider it to be the same
5264 expression as the other. */
5265 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5266 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5267 in1_p, low1, high1)
5268 && 0 != (tem = (build_range_check (loc, type,
5269 lhs != 0 ? lhs
5270 : rhs != 0 ? rhs : integer_zero_node,
5271 in_p, low, high))))
5273 if (strict_overflow_p)
5274 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5275 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5278 /* On machines where branches are expensive, if this is a
5279 short-circuited branch and the underlying object on both sides
5280 is the same, make a non-short-circuit operation. */
5281 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5282 && lhs != 0 && rhs != 0
5283 && (code == TRUTH_ANDIF_EXPR
5284 || code == TRUTH_ORIF_EXPR)
5285 && operand_equal_p (lhs, rhs, 0))
5287 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5288 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5289 which case we can't do this. */
5290 if (simple_operand_p (lhs))
5291 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5292 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5293 type, op0, op1);
5295 else if (!lang_hooks.decls.global_bindings_p ()
5296 && !CONTAINS_PLACEHOLDER_P (lhs))
5298 tree common = save_expr (lhs);
5300 if (0 != (lhs = build_range_check (loc, type, common,
5301 or_op ? ! in0_p : in0_p,
5302 low0, high0))
5303 && (0 != (rhs = build_range_check (loc, type, common,
5304 or_op ? ! in1_p : in1_p,
5305 low1, high1))))
5307 if (strict_overflow_p)
5308 fold_overflow_warning (warnmsg,
5309 WARN_STRICT_OVERFLOW_COMPARISON);
5310 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5311 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5312 type, lhs, rhs);
5317 return 0;
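
/* Illustration (editor's standalone sketch): the non-short-circuit rewrite
   above.  Both operands test the same simple object CH, so on targets with
   costly branches the short-circuit && can become one range check with a
   single branch.  */

static int is_digit_branchy (unsigned char ch)
{
  return ch >= '0' && ch <= '9';        /* two potential branches */
}

static int is_digit_folded (unsigned char ch)
{
  return (unsigned) (ch - '0') <= 9u;   /* one compare, one branch */
}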
5320 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
5321 P-bit value. Arrange things so the extra bits will be set to zero if and
5322 only if C is sign-extended to its full width. If MASK is nonzero,
5323 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5325 static tree
5326 unextend (tree c, int p, int unsignedp, tree mask)
5328 tree type = TREE_TYPE (c);
5329 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5330 tree temp;
5332 if (p == modesize || unsignedp)
5333 return c;
5335 /* We work by getting just the sign bit into the low-order bit, then
5336 into the high-order bit, then sign-extend. We then XOR that value
5337 with C. */
5338 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5340 /* We must use a signed type in order to get an arithmetic right shift.
5341 However, we must also avoid introducing accidental overflows, so that
5342 a subsequent call to integer_zerop will work. Hence we must
5343 do the type conversion here. At this point, the constant is either
5344 zero or one, and the conversion to a signed type can never overflow.
5345 We could get an overflow if this conversion is done anywhere else. */
5346 if (TYPE_UNSIGNED (type))
5347 temp = fold_convert (signed_type_for (type), temp);
5349 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5350 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5351 if (mask != 0)
5352 temp = const_binop (BIT_AND_EXPR, temp,
5353 fold_convert (TREE_TYPE (c), mask));
5354 /* If necessary, convert the type back to match the type of C. */
5355 if (TYPE_UNSIGNED (type))
5356 temp = fold_convert (type, temp);
5358 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
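
/* Illustration (editor's standalone sketch): the shift-and-XOR trick used
   by unextend, for a P-bit constant C held in a 32-bit word (the MASK step
   is omitted; assumes the usual arithmetic right shift of negative signed
   values).  The P-bit sign is smeared across bits P..31 and XORed into C,
   so the high bits of the result are zero exactly when C was the
   sign-extension of its low P bits.  */

#include <assert.h>
#include <stdint.h>

static uint32_t unextend_sketch (uint32_t c, unsigned p)  /* 1 <= p <= 31 */
{
  uint32_t sign = (c >> (p - 1)) & 1;                  /* field's sign bit */
  int32_t smear = (int32_t) (sign << 31) >> (31 - p);  /* ones in bits P..31 */
  return c ^ (uint32_t) smear;
}

static void check_unextend (void)
{
  assert (unextend_sketch (0xfffffff5u, 8) == 0xf5u);  /* was sign-extended */
  assert (unextend_sketch (0x000000f5u, 8) != 0xf5u);  /* was not */
}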
5361 /* For an expression that has the form
5362 (A && B) || ~B
5363 or
5364 (A || B) && ~B,
5365 we can drop one of the inner expressions and simplify to
5366 A || ~B
5367 or
5368 A && ~B
5369 LOC is the location of the resulting expression. OP is the inner
5370 logical operation; the left-hand side in the examples above, while CMPOP
5371 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5372 removing a condition that guards another, as in
5373 (A != NULL && A->...) || A == NULL
5374 which we must not transform. If RHS_ONLY is true, only eliminate the
5375 right-most operand of the inner logical operation. */
5377 static tree
5378 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5379 bool rhs_only)
5381 tree type = TREE_TYPE (cmpop);
5382 enum tree_code code = TREE_CODE (cmpop);
5383 enum tree_code truthop_code = TREE_CODE (op);
5384 tree lhs = TREE_OPERAND (op, 0);
5385 tree rhs = TREE_OPERAND (op, 1);
5386 tree orig_lhs = lhs, orig_rhs = rhs;
5387 enum tree_code rhs_code = TREE_CODE (rhs);
5388 enum tree_code lhs_code = TREE_CODE (lhs);
5389 enum tree_code inv_code;
5391 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5392 return NULL_TREE;
5394 if (TREE_CODE_CLASS (code) != tcc_comparison)
5395 return NULL_TREE;
5397 if (rhs_code == truthop_code)
5399 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5400 if (newrhs != NULL_TREE)
5402 rhs = newrhs;
5403 rhs_code = TREE_CODE (rhs);
5406 if (lhs_code == truthop_code && !rhs_only)
5408 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5409 if (newlhs != NULL_TREE)
5411 lhs = newlhs;
5412 lhs_code = TREE_CODE (lhs);
5416 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5417 if (inv_code == rhs_code
5418 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5419 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5420 return lhs;
5421 if (!rhs_only && inv_code == lhs_code
5422 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5423 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5424 return rhs;
5425 if (rhs != orig_rhs || lhs != orig_lhs)
5426 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5427 lhs, rhs);
5428 return NULL_TREE;
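
/* Illustration (editor's standalone sketch): the arm-dropping rewrite above,
   and the guard RHS_ONLY protects.  In the pair below the inner a < b can be
   dropped: whenever it is false, the opposite arm a >= b is true and decides
   the || by itself.  */

static int guarded_naive (int a, int b, int c, int d)
{
  return (a < b && c == d) || a >= b;
}

static int guarded_folded (int a, int b, int c, int d)
{
  return c == d || a >= b;   /* a < b dropped; equivalent for all inputs */
}

/* But (p != NULL && p->x > 0) || p == NULL must keep p != NULL, since
   dropping it would evaluate p->x for a null p; hence RHS_ONLY.  */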
5431 /* Find ways of folding logical expressions of LHS and RHS:
5432 Try to merge two comparisons to the same innermost item.
5433 Look for range tests like "ch >= '0' && ch <= '9'".
5434 Look for combinations of simple terms on machines with expensive branches
5435 and evaluate the RHS unconditionally.
5437 For example, if we have p->a == 2 && p->b == 4 and we can make an
5438 object large enough to span both A and B, we can do this with a comparison
5439 against the object ANDed with a mask.
5441 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5442 operations to do this with one comparison.
5444 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5445 function and the one above.
5447 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5448 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5450 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5451 two operands.
5453 We return the simplified tree or 0 if no optimization is possible. */
5455 static tree
5456 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5457 tree lhs, tree rhs)
5459 /* If this is the "or" of two comparisons, we can do something if
5460 the comparisons are NE_EXPR. If this is the "and", we can do something
5461 if the comparisons are EQ_EXPR. I.e.,
5462 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5464 WANTED_CODE is this operation code. For single bit fields, we can
5465 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5466 comparison for one-bit fields. */
5468 enum tree_code wanted_code;
5469 enum tree_code lcode, rcode;
5470 tree ll_arg, lr_arg, rl_arg, rr_arg;
5471 tree ll_inner, lr_inner, rl_inner, rr_inner;
5472 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5473 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5474 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5475 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5476 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5477 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5478 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5479 machine_mode lnmode, rnmode;
5480 tree ll_mask, lr_mask, rl_mask, rr_mask;
5481 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5482 tree l_const, r_const;
5483 tree lntype, rntype, result;
5484 HOST_WIDE_INT first_bit, end_bit;
5485 int volatilep;
5487 /* Start by getting the comparison codes. Fail if anything is volatile.
5488 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5489 it were surrounded with a NE_EXPR. */
5491 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5492 return 0;
5494 lcode = TREE_CODE (lhs);
5495 rcode = TREE_CODE (rhs);
5497 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5499 lhs = build2 (NE_EXPR, truth_type, lhs,
5500 build_int_cst (TREE_TYPE (lhs), 0));
5501 lcode = NE_EXPR;
5504 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5506 rhs = build2 (NE_EXPR, truth_type, rhs,
5507 build_int_cst (TREE_TYPE (rhs), 0));
5508 rcode = NE_EXPR;
5511 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5512 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5513 return 0;
5515 ll_arg = TREE_OPERAND (lhs, 0);
5516 lr_arg = TREE_OPERAND (lhs, 1);
5517 rl_arg = TREE_OPERAND (rhs, 0);
5518 rr_arg = TREE_OPERAND (rhs, 1);
5520 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5521 if (simple_operand_p (ll_arg)
5522 && simple_operand_p (lr_arg))
5524 if (operand_equal_p (ll_arg, rl_arg, 0)
5525 && operand_equal_p (lr_arg, rr_arg, 0))
5527 result = combine_comparisons (loc, code, lcode, rcode,
5528 truth_type, ll_arg, lr_arg);
5529 if (result)
5530 return result;
5532 else if (operand_equal_p (ll_arg, rr_arg, 0)
5533 && operand_equal_p (lr_arg, rl_arg, 0))
5535 result = combine_comparisons (loc, code, lcode,
5536 swap_tree_comparison (rcode),
5537 truth_type, ll_arg, lr_arg);
5538 if (result)
5539 return result;
5543 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5544 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5546 /* If the RHS can be evaluated unconditionally and its operands are
5547 simple, it wins to evaluate the RHS unconditionally on machines
5548 with expensive branches. In this case, this isn't a comparison
5549 that can be merged. */
5551 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5552 false) >= 2
5553 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5554 && simple_operand_p (rl_arg)
5555 && simple_operand_p (rr_arg))
5557 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5558 if (code == TRUTH_OR_EXPR
5559 && lcode == NE_EXPR && integer_zerop (lr_arg)
5560 && rcode == NE_EXPR && integer_zerop (rr_arg)
5561 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5562 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5563 return build2_loc (loc, NE_EXPR, truth_type,
5564 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5565 ll_arg, rl_arg),
5566 build_int_cst (TREE_TYPE (ll_arg), 0));
5568 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5569 if (code == TRUTH_AND_EXPR
5570 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5571 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5572 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5573 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5574 return build2_loc (loc, EQ_EXPR, truth_type,
5575 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5576 ll_arg, rl_arg),
5577 build_int_cst (TREE_TYPE (ll_arg), 0));
5580 /* See if the comparisons can be merged. Then get all the parameters for
5581 each side. */
5583 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5584 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5585 return 0;
5587 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5588 volatilep = 0;
5589 ll_inner = decode_field_reference (loc, ll_arg,
5590 &ll_bitsize, &ll_bitpos, &ll_mode,
5591 &ll_unsignedp, &ll_reversep, &volatilep,
5592 &ll_mask, &ll_and_mask);
5593 lr_inner = decode_field_reference (loc, lr_arg,
5594 &lr_bitsize, &lr_bitpos, &lr_mode,
5595 &lr_unsignedp, &lr_reversep, &volatilep,
5596 &lr_mask, &lr_and_mask);
5597 rl_inner = decode_field_reference (loc, rl_arg,
5598 &rl_bitsize, &rl_bitpos, &rl_mode,
5599 &rl_unsignedp, &rl_reversep, &volatilep,
5600 &rl_mask, &rl_and_mask);
5601 rr_inner = decode_field_reference (loc, rr_arg,
5602 &rr_bitsize, &rr_bitpos, &rr_mode,
5603 &rr_unsignedp, &rr_reversep, &volatilep,
5604 &rr_mask, &rr_and_mask);
5606 /* The inner operation on the lhs of each comparison must be the
5607 same if we are to be able to do anything.
5608 Then see if we have constants. If not, the same must be true for
5609 the rhs's. */
5610 if (volatilep
5611 || ll_reversep != rl_reversep
5612 || ll_inner == 0 || rl_inner == 0
5613 || ! operand_equal_p (ll_inner, rl_inner, 0))
5614 return 0;
5616 if (TREE_CODE (lr_arg) == INTEGER_CST
5617 && TREE_CODE (rr_arg) == INTEGER_CST)
5619 l_const = lr_arg, r_const = rr_arg;
5620 lr_reversep = ll_reversep;
5622 else if (lr_reversep != rr_reversep
5623 || lr_inner == 0 || rr_inner == 0
5624 || ! operand_equal_p (lr_inner, rr_inner, 0))
5625 return 0;
5626 else
5627 l_const = r_const = 0;
5629 /* If either comparison code is not correct for our logical operation,
5630 fail. However, we can convert a one-bit comparison against zero into
5631 the opposite comparison against that bit being set in the field. */
5633 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5634 if (lcode != wanted_code)
5636 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5638 /* Make the left operand unsigned, since we are only interested
5639 in the value of one bit. Otherwise we are doing the wrong
5640 thing below. */
5641 ll_unsignedp = 1;
5642 l_const = ll_mask;
5644 else
5645 return 0;
5648 /* This is analogous to the code for l_const above. */
5649 if (rcode != wanted_code)
5651 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5653 rl_unsignedp = 1;
5654 r_const = rl_mask;
5656 else
5657 return 0;
5660 /* See if we can find a mode that contains both fields being compared on
5661 the left. If we can't, fail. Otherwise, update all constants and masks
5662 to be relative to a field of that size. */
5663 first_bit = MIN (ll_bitpos, rl_bitpos);
5664 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5665 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5666 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5667 volatilep);
5668 if (lnmode == VOIDmode)
5669 return 0;
5671 lnbitsize = GET_MODE_BITSIZE (lnmode);
5672 lnbitpos = first_bit & ~ (lnbitsize - 1);
5673 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5674 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5676 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5678 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5679 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5682 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5683 size_int (xll_bitpos));
5684 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5685 size_int (xrl_bitpos));
5687 if (l_const)
5689 l_const = fold_convert_loc (loc, lntype, l_const);
5690 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5691 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5692 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5693 fold_build1_loc (loc, BIT_NOT_EXPR,
5694 lntype, ll_mask))))
5696 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5698 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5701 if (r_const)
5703 r_const = fold_convert_loc (loc, lntype, r_const);
5704 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5705 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5706 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5707 fold_build1_loc (loc, BIT_NOT_EXPR,
5708 lntype, rl_mask))))
5710 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5712 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5716 /* If the right sides are not constant, do the same for them. Also,
5717 disallow this optimization if a size or signedness mismatch occurs
5718 between the left and right sides. */
5719 if (l_const == 0)
5721 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5722 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5723 /* Make sure the two fields on the right
5724 correspond to the left without being swapped. */
5725 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5726 return 0;
5728 first_bit = MIN (lr_bitpos, rr_bitpos);
5729 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5730 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5731 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5732 volatilep);
5733 if (rnmode == VOIDmode)
5734 return 0;
5736 rnbitsize = GET_MODE_BITSIZE (rnmode);
5737 rnbitpos = first_bit & ~ (rnbitsize - 1);
5738 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5739 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5741 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5743 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5744 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5747 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5748 rntype, lr_mask),
5749 size_int (xlr_bitpos));
5750 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5751 rntype, rr_mask),
5752 size_int (xrr_bitpos));
5754 /* Make a mask that corresponds to both fields being compared.
5755 Do this for both items being compared. If the operands are the
5756 same size and the bits being compared are in the same position
5757 then we can do this by masking both and comparing the masked
5758 results. */
5759 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5760 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5761 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5763 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5764 ll_unsignedp || rl_unsignedp, ll_reversep);
5765 if (! all_ones_mask_p (ll_mask, lnbitsize))
5766 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5768 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5769 lr_unsignedp || rr_unsignedp, lr_reversep);
5770 if (! all_ones_mask_p (lr_mask, rnbitsize))
5771 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5773 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5776 /* There is still another way we can do something: If both pairs of
5777 fields being compared are adjacent, we may be able to make a wider
5778 field containing them both.
5780 Note that we still must mask the lhs/rhs expressions. Furthermore,
5781 the mask must be shifted to account for the shift done by
5782 make_bit_field_ref. */
5783 if ((ll_bitsize + ll_bitpos == rl_bitpos
5784 && lr_bitsize + lr_bitpos == rr_bitpos)
5785 || (ll_bitpos == rl_bitpos + rl_bitsize
5786 && lr_bitpos == rr_bitpos + rr_bitsize))
5788 tree type;
5790 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5791 ll_bitsize + rl_bitsize,
5792 MIN (ll_bitpos, rl_bitpos),
5793 ll_unsignedp, ll_reversep);
5794 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5795 lr_bitsize + rr_bitsize,
5796 MIN (lr_bitpos, rr_bitpos),
5797 lr_unsignedp, lr_reversep);
5799 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5800 size_int (MIN (xll_bitpos, xrl_bitpos)));
5801 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5802 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5804 /* Convert to the smaller type before masking out unwanted bits. */
5805 type = lntype;
5806 if (lntype != rntype)
5808 if (lnbitsize > rnbitsize)
5810 lhs = fold_convert_loc (loc, rntype, lhs);
5811 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5812 type = rntype;
5814 else if (lnbitsize < rnbitsize)
5816 rhs = fold_convert_loc (loc, lntype, rhs);
5817 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5818 type = lntype;
5822 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5823 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5825 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5826 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5828 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5831 return 0;
5834 /* Handle the case of comparisons with constants. If there is something in
5835 common between the masks, those bits of the constants must be the same.
5836 If not, the condition is always false. Test for this to avoid generating
5837 incorrect code below. */
5838 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5839 if (! integer_zerop (result)
5840 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5841 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5843 if (wanted_code == NE_EXPR)
5845 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5846 return constant_boolean_node (true, truth_type);
5848 else
5850 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5851 return constant_boolean_node (false, truth_type);
5855 /* Construct the expression we will return. First get the component
5856 reference we will make. Unless the mask is all ones the width of
5857 that field, perform the mask operation. Then compare with the
5858 merged constant. */
5859 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5860 ll_unsignedp || rl_unsignedp, ll_reversep);
5862 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5863 if (! all_ones_mask_p (ll_mask, lnbitsize))
5864 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5866 return build2_loc (loc, wanted_code, truth_type, result,
5867 const_binop (BIT_IOR_EXPR, l_const, r_const));
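/* Editorial illustration, not part of fold-const.c: a minimal sketch of
   what the bitfield merge above buys at the source level.  The struct
   layout, the memcpy of the first byte, and the merged constant
   2 | (4 << 3) are assumptions that hold for GCC's usual little-endian
   bitfield allocation; they are not guaranteed by the C standard.  */
#include <assert.h>
#include <string.h>

struct two_fields { unsigned b : 3; unsigned c : 5; };

static int compare_naive (struct two_fields s)
{
  return s.b == 2 && s.c == 4;        /* Two extracts, two compares.  */
}

static int compare_merged (struct two_fields s)
{
  unsigned char byte;
  memcpy (&byte, &s, 1);              /* One load of the containing unit,  */
  return byte == (2u | (4u << 3));    /* one compare with a merged constant.  */
}

int main (void)
{
  struct two_fields s = { 2, 4 };
  assert (compare_naive (s) && compare_merged (s));
  return 0;
}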
5870 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5871 constant. */
5873 static tree
5874 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5875 tree op0, tree op1)
5877 tree arg0 = op0;
5878 enum tree_code op_code;
5879 tree comp_const;
5880 tree minmax_const;
5881 int consts_equal, consts_lt;
5882 tree inner;
5884 STRIP_SIGN_NOPS (arg0);
5886 op_code = TREE_CODE (arg0);
5887 minmax_const = TREE_OPERAND (arg0, 1);
5888 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5889 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5890 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5891 inner = TREE_OPERAND (arg0, 0);
5893 /* If something does not permit us to optimize, return NULL_TREE. */
5894 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5895 || TREE_CODE (comp_const) != INTEGER_CST
5896 || TREE_OVERFLOW (comp_const)
5897 || TREE_CODE (minmax_const) != INTEGER_CST
5898 || TREE_OVERFLOW (minmax_const))
5899 return NULL_TREE;
5901 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5902 and GT_EXPR, doing the rest with recursive calls using logical
5903 simplifications. */
5904 switch (code)
5906 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5908 tree tem
5909 = optimize_minmax_comparison (loc,
5910 invert_tree_comparison (code, false),
5911 type, op0, op1);
5912 if (tem)
5913 return invert_truthvalue_loc (loc, tem);
5914 return NULL_TREE;
5917 case GE_EXPR:
5918 return
5919 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5920 optimize_minmax_comparison
5921 (loc, EQ_EXPR, type, arg0, comp_const),
5922 optimize_minmax_comparison
5923 (loc, GT_EXPR, type, arg0, comp_const));
5925 case EQ_EXPR:
5926 if (op_code == MAX_EXPR && consts_equal)
5927 /* MAX (X, 0) == 0 -> X <= 0 */
5928 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5930 else if (op_code == MAX_EXPR && consts_lt)
5931 /* MAX (X, 0) == 5 -> X == 5 */
5932 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5934 else if (op_code == MAX_EXPR)
5935 /* MAX (X, 0) == -1 -> false */
5936 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5938 else if (consts_equal)
5939 /* MIN (X, 0) == 0 -> X >= 0 */
5940 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5942 else if (consts_lt)
5943 /* MIN (X, 0) == 5 -> false */
5944 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5946 else
5947 /* MIN (X, 0) == -1 -> X == -1 */
5948 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5950 case GT_EXPR:
5951 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5952 /* MAX (X, 0) > 0 -> X > 0
5953 MAX (X, 0) > 5 -> X > 5 */
5954 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5956 else if (op_code == MAX_EXPR)
5957 /* MAX (X, 0) > -1 -> true */
5958 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5960 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5961 /* MIN (X, 0) > 0 -> false
5962 MIN (X, 0) > 5 -> false */
5963 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5965 else
5966 /* MIN (X, 0) > -1 -> X > -1 */
5967 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5969 default:
5970 return NULL_TREE;
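/* Editorial illustration, not part of fold-const.c: the scalar
   identities implemented by the EQ_EXPR and GT_EXPR cases above,
   checked exhaustively over a small range.  */
#include <assert.h>

#define MAX(a, b) ((a) > (b) ? (a) : (b))
#define MIN(a, b) ((a) < (b) ? (a) : (b))

int main (void)
{
  for (int x = -8; x <= 8; x++)
    {
      assert ((MAX (x, 0) == 0) == (x <= 0));  /* MAX (X, 0) == 0 -> X <= 0  */
      assert ((MAX (x, 0) > 5) == (x > 5));    /* MAX (X, 0) > 5  -> X > 5   */
      assert (MAX (x, 0) > -1);                /* MAX (X, 0) > -1 -> true    */
      assert ((MIN (x, 0) == 0) == (x >= 0));  /* MIN (X, 0) == 0 -> X >= 0  */
      assert (!(MIN (x, 0) > 5));              /* MIN (X, 0) > 5  -> false   */
    }
  return 0;
}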
5974 /* T is an integer expression that is being multiplied, divided, or taken a
5975 modulus (CODE says which and what kind of divide or modulus) by a
5976 constant C. See if we can eliminate that operation by folding it with
5977 other operations already in T. WIDE_TYPE, if non-null, is a type that
5978 should be used for the computation if wider than our type.
5980 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5981 (X * 2) + (Y * 4). We must, however, be assured that either the original
5982 expression would not overflow or that overflow is undefined for the type
5983 in the language in question.
5985 If we return a non-null expression, it is an equivalent form of the
5986 original computation, but need not be in the original type.
5988 We set *STRICT_OVERFLOW_P to true if the return value depends on
5989 signed overflow being undefined. Otherwise we do not change
5990 *STRICT_OVERFLOW_P. */
5992 static tree
5993 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5994 bool *strict_overflow_p)
5996 /* To avoid exponential search depth, refuse to allow recursion past
5997 three levels. Beyond that (1) it's highly unlikely that we'll find
5998 something interesting and (2) we've probably processed it before
5999 when we built the inner expression. */
6001 static int depth;
6002 tree ret;
6004 if (depth > 3)
6005 return NULL;
6007 depth++;
6008 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6009 depth--;
6011 return ret;
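/* Editorial illustration, not part of fold-const.c: the identity from
   the comment above extract_muldiv, (X * 8 + Y * 16) / 4 becoming
   X * 2 + Y * 4.  It is exact because both products are divisible by
   the divisor, and the small ranges keep every intermediate in range
   of int.  */
#include <assert.h>

int main (void)
{
  for (int x = -10; x <= 10; x++)
    for (int y = -10; y <= 10; y++)
      assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
  return 0;
}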
6014 static tree
6015 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6016 bool *strict_overflow_p)
6018 tree type = TREE_TYPE (t);
6019 enum tree_code tcode = TREE_CODE (t);
6020 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6021 > GET_MODE_SIZE (TYPE_MODE (type)))
6022 ? wide_type : type);
6023 tree t1, t2;
6024 int same_p = tcode == code;
6025 tree op0 = NULL_TREE, op1 = NULL_TREE;
6026 bool sub_strict_overflow_p;
6028 /* Don't deal with constants of zero here; they confuse the code below. */
6029 if (integer_zerop (c))
6030 return NULL_TREE;
6032 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6033 op0 = TREE_OPERAND (t, 0);
6035 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6036 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6038 /* Note that we need not handle conditional operations here since fold
6039 already handles those cases. So just do arithmetic here. */
6040 switch (tcode)
6042 case INTEGER_CST:
6043 /* For a constant, we can always simplify if we are a multiply
6044 or (for divide and modulus) if it is a multiple of our constant. */
6045 if (code == MULT_EXPR
6046 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6048 tree tem = const_binop (code, fold_convert (ctype, t),
6049 fold_convert (ctype, c));
6050 /* If the multiplication overflowed to INT_MIN then we lost sign
6051 information on it and a subsequent multiplication might
6052 spuriously overflow. See PR68142. */
6053 if (TREE_OVERFLOW (tem)
6054 && wi::eq_p (tem, wi::min_value (TYPE_PRECISION (ctype), SIGNED)))
6055 return NULL_TREE;
6056 return tem;
6058 break;
6060 CASE_CONVERT: case NON_LVALUE_EXPR:
6061 /* If op0 is an expression ... */
6062 if ((COMPARISON_CLASS_P (op0)
6063 || UNARY_CLASS_P (op0)
6064 || BINARY_CLASS_P (op0)
6065 || VL_EXP_CLASS_P (op0)
6066 || EXPRESSION_CLASS_P (op0))
6067 /* ... and has wrapping overflow, and its type is smaller
6068 than ctype, then we cannot pass through as widening. */
6069 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6070 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6071 && (TYPE_PRECISION (ctype)
6072 > TYPE_PRECISION (TREE_TYPE (op0))))
6073 /* ... or this is a truncation (t is narrower than op0),
6074 then we cannot pass through this narrowing. */
6075 || (TYPE_PRECISION (type)
6076 < TYPE_PRECISION (TREE_TYPE (op0)))
6077 /* ... or signedness changes for division or modulus,
6078 then we cannot pass through this conversion. */
6079 || (code != MULT_EXPR
6080 && (TYPE_UNSIGNED (ctype)
6081 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6082 /* ... or has undefined overflow while the converted to
6083 type has not, we cannot do the operation in the inner type
6084 as that would introduce undefined overflow. */
6085 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6086 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6087 && !TYPE_OVERFLOW_UNDEFINED (type))))
6088 break;
6090 /* Pass the constant down and see if we can make a simplification. If
6091 we can, replace this expression with the inner simplification for
6092 possible later conversion to our or some other type. */
6093 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6094 && TREE_CODE (t2) == INTEGER_CST
6095 && !TREE_OVERFLOW (t2)
6096 && (0 != (t1 = extract_muldiv (op0, t2, code,
6097 code == MULT_EXPR
6098 ? ctype : NULL_TREE,
6099 strict_overflow_p))))
6100 return t1;
6101 break;
6103 case ABS_EXPR:
6104 /* If widening the type changes it from signed to unsigned, then we
6105 must avoid building ABS_EXPR itself as unsigned. */
6106 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6108 tree cstype = (*signed_type_for) (ctype);
6109 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6110 != 0)
6112 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6113 return fold_convert (ctype, t1);
6115 break;
6117 /* If the constant is negative, we cannot simplify this. */
6118 if (tree_int_cst_sgn (c) == -1)
6119 break;
6120 /* FALLTHROUGH */
6121 case NEGATE_EXPR:
6122 /* For division and modulus, type can't be unsigned, as e.g.
6123 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6124 For signed types, even with wrapping overflow, this is fine. */
6125 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6126 break;
6127 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6128 != 0)
6129 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6130 break;
6132 case MIN_EXPR: case MAX_EXPR:
6133 /* If widening the type changes the signedness, then we can't perform
6134 this optimization as that changes the result. */
6135 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6136 break;
6138 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6139 sub_strict_overflow_p = false;
6140 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6141 &sub_strict_overflow_p)) != 0
6142 && (t2 = extract_muldiv (op1, c, code, wide_type,
6143 &sub_strict_overflow_p)) != 0)
6145 if (tree_int_cst_sgn (c) < 0)
6146 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6147 if (sub_strict_overflow_p)
6148 *strict_overflow_p = true;
6149 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6150 fold_convert (ctype, t2));
6152 break;
6154 case LSHIFT_EXPR: case RSHIFT_EXPR:
6155 /* If the second operand is constant, this is a multiplication
6156 or floor division, by a power of two, so we can treat it that
6157 way unless the multiplier or divisor overflows. Signed
6158 left-shift overflow is implementation-defined rather than
6159 undefined in C90, so do not convert signed left shift into
6160 multiplication. */
6161 if (TREE_CODE (op1) == INTEGER_CST
6162 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6163 /* const_binop may not detect overflow correctly,
6164 so check for it explicitly here. */
6165 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6166 && 0 != (t1 = fold_convert (ctype,
6167 const_binop (LSHIFT_EXPR,
6168 size_one_node,
6169 op1)))
6170 && !TREE_OVERFLOW (t1))
6171 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6172 ? MULT_EXPR : FLOOR_DIV_EXPR,
6173 ctype,
6174 fold_convert (ctype, op0),
6175 t1),
6176 c, code, wide_type, strict_overflow_p);
6177 break;
6179 case PLUS_EXPR: case MINUS_EXPR:
6180 /* See if we can eliminate the operation on both sides. If we can, we
6181 can return a new PLUS or MINUS. If we can't, the only remaining
6182 cases where we can do anything are if the second operand is a
6183 constant. */
6184 sub_strict_overflow_p = false;
6185 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6186 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6187 if (t1 != 0 && t2 != 0
6188 && (code == MULT_EXPR
6189 /* If not multiplication, we can only do this if both operands
6190 are divisible by c. */
6191 || (multiple_of_p (ctype, op0, c)
6192 && multiple_of_p (ctype, op1, c))))
6194 if (sub_strict_overflow_p)
6195 *strict_overflow_p = true;
6196 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6197 fold_convert (ctype, t2));
6200 /* If this was a subtraction, negate OP1 and set it to be an addition.
6201 This simplifies the logic below. */
6202 if (tcode == MINUS_EXPR)
6204 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6205 /* If OP1 was not easily negatable, the constant may be OP0. */
6206 if (TREE_CODE (op0) == INTEGER_CST)
6208 std::swap (op0, op1);
6209 std::swap (t1, t2);
6213 if (TREE_CODE (op1) != INTEGER_CST)
6214 break;
6216 /* If either OP1 or C are negative, this optimization is not safe for
6217 some of the division and remainder types while for others we need
6218 to change the code. */
6219 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6221 if (code == CEIL_DIV_EXPR)
6222 code = FLOOR_DIV_EXPR;
6223 else if (code == FLOOR_DIV_EXPR)
6224 code = CEIL_DIV_EXPR;
6225 else if (code != MULT_EXPR
6226 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6227 break;
6230 /* If it's a multiply or a division/modulus operation of a multiple
6231 of our constant, do the operation and verify it doesn't overflow. */
6232 if (code == MULT_EXPR
6233 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6235 op1 = const_binop (code, fold_convert (ctype, op1),
6236 fold_convert (ctype, c));
6237 /* We allow the constant to overflow with wrapping semantics. */
6238 if (op1 == 0
6239 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6240 break;
6242 else
6243 break;
6245 /* If we have an unsigned type, we cannot widen the operation since it
6246 will change the result if the original computation overflowed. */
6247 if (TYPE_UNSIGNED (ctype) && ctype != type)
6248 break;
6250 /* If we were able to eliminate our operation from the first side,
6251 apply our operation to the second side and reform the PLUS. */
6252 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6253 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6255 /* The last case is if we are a multiply. In that case, we can
6256 apply the distributive law to commute the multiply and addition
6257 if the multiplication of the constants doesn't overflow
6258 and overflow is defined. With undefined overflow
6259 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6260 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6261 return fold_build2 (tcode, ctype,
6262 fold_build2 (code, ctype,
6263 fold_convert (ctype, op0),
6264 fold_convert (ctype, c)),
6265 op1);
6267 break;
6269 case MULT_EXPR:
6270 /* We have a special case here if we are doing something like
6271 (C * 8) % 4 since we know that's zero. */
6272 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6273 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6274 /* If the multiplication can overflow we cannot optimize this. */
6275 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6276 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6277 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6279 *strict_overflow_p = true;
6280 return omit_one_operand (type, integer_zero_node, op0);
6283 /* ... fall through ... */
6285 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6286 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6287 /* If we can extract our operation from the LHS, do so and return a
6288 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6289 do something only if the second operand is a constant. */
6290 if (same_p
6291 && (t1 = extract_muldiv (op0, c, code, wide_type,
6292 strict_overflow_p)) != 0)
6293 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6294 fold_convert (ctype, op1));
6295 else if (tcode == MULT_EXPR && code == MULT_EXPR
6296 && (t1 = extract_muldiv (op1, c, code, wide_type,
6297 strict_overflow_p)) != 0)
6298 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6299 fold_convert (ctype, t1));
6300 else if (TREE_CODE (op1) != INTEGER_CST)
6301 return 0;
6303 /* If these are the same operation types, we can associate them
6304 assuming no overflow. */
6305 if (tcode == code)
6307 bool overflow_p = false;
6308 bool overflow_mul_p;
6309 signop sign = TYPE_SIGN (ctype);
6310 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6311 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6312 if (overflow_mul_p
6313 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6314 overflow_p = true;
6315 if (!overflow_p)
6317 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6318 TYPE_SIGN (TREE_TYPE (op1)));
6319 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6320 wide_int_to_tree (ctype, mul));
6324 /* If these operations "cancel" each other, we have the main
6325 optimizations of this pass, which occur when either constant is a
6326 multiple of the other, in which case we replace this with an
6327 operation of either CODE or TCODE.
6329 If we have an unsigned type, we cannot do this since it will change
6330 the result if the original computation overflowed. */
6331 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6332 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6333 || (tcode == MULT_EXPR
6334 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6335 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6336 && code != MULT_EXPR)))
6338 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6340 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6341 *strict_overflow_p = true;
6342 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6343 fold_convert (ctype,
6344 const_binop (TRUNC_DIV_EXPR,
6345 op1, c)));
6347 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6349 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6350 *strict_overflow_p = true;
6351 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6352 fold_convert (ctype,
6353 const_binop (TRUNC_DIV_EXPR,
6354 c, op1)));
6357 break;
6359 default:
6360 break;
6363 return 0;
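/* Editorial illustration, not part of fold-const.c: two of the cases
   handled above, the MULT_EXPR modulus short-circuit and the
   association of like divisions, checked on a small nonnegative
   range.  */
#include <assert.h>

int main (void)
{
  for (int c = 0; c <= 20; c++)
    {
      assert ((c * 8) % 4 == 0);        /* (C * 8) % 4 is known to be zero.  */
      assert ((c / 2) / 3 == c / 6);    /* Like divisions associate.  */
    }
  return 0;
}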
6366 /* Return a node which has the indicated constant VALUE (either 0 or
6367 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6368 and is of the indicated TYPE. */
6370 tree
6371 constant_boolean_node (bool value, tree type)
6373 if (type == integer_type_node)
6374 return value ? integer_one_node : integer_zero_node;
6375 else if (type == boolean_type_node)
6376 return value ? boolean_true_node : boolean_false_node;
6377 else if (TREE_CODE (type) == VECTOR_TYPE)
6378 return build_vector_from_val (type,
6379 build_int_cst (TREE_TYPE (type),
6380 value ? -1 : 0));
6381 else
6382 return fold_convert (type, value ? integer_one_node : integer_zero_node);
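/* Editorial illustration, not part of fold-const.c: why the vector case
   above uses -1 rather than 1 for "true".  Element-wise comparisons
   produce all-ones lanes.  Assumes the GNU vector extension
   (__attribute__ ((vector_size))), which is not standard C.  */
#include <assert.h>

typedef int v4si __attribute__ ((vector_size (16)));

int main (void)
{
  v4si a = { 1, 2, 3, 4 }, b = { 1, 0, 3, 0 };
  v4si eq = (a == b);                   /* Element-wise comparison.  */
  assert (eq[0] == -1 && eq[1] == 0);   /* True lanes are -1, not 1.  */
  return 0;
}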
6386 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6387 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6388 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6389 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6390 COND is the first argument to CODE; otherwise (as in the example
6391 given here), it is the second argument. TYPE is the type of the
6392 original expression. Return NULL_TREE if no simplification is
6393 possible. */
6395 static tree
6396 fold_binary_op_with_conditional_arg (location_t loc,
6397 enum tree_code code,
6398 tree type, tree op0, tree op1,
6399 tree cond, tree arg, int cond_first_p)
6401 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6402 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6403 tree test, true_value, false_value;
6404 tree lhs = NULL_TREE;
6405 tree rhs = NULL_TREE;
6406 enum tree_code cond_code = COND_EXPR;
6408 if (TREE_CODE (cond) == COND_EXPR
6409 || TREE_CODE (cond) == VEC_COND_EXPR)
6411 test = TREE_OPERAND (cond, 0);
6412 true_value = TREE_OPERAND (cond, 1);
6413 false_value = TREE_OPERAND (cond, 2);
6414 /* If this operand is a throw expression (its type is void), then
6415 it does not make sense to try to perform a logical or arithmetic
6416 operation involving it. */
6417 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6418 lhs = true_value;
6419 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6420 rhs = false_value;
6422 else
6424 tree testtype = TREE_TYPE (cond);
6425 test = cond;
6426 true_value = constant_boolean_node (true, testtype);
6427 false_value = constant_boolean_node (false, testtype);
6430 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6431 cond_code = VEC_COND_EXPR;
6433 /* This transformation is only worthwhile if we don't have to wrap ARG
6434 in a SAVE_EXPR and the operation can be simplified without recursing
6435 on at least one of the branches once it is pushed inside the COND_EXPR. */
6436 if (!TREE_CONSTANT (arg)
6437 && (TREE_SIDE_EFFECTS (arg)
6438 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6439 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6440 return NULL_TREE;
6442 arg = fold_convert_loc (loc, arg_type, arg);
6443 if (lhs == 0)
6445 true_value = fold_convert_loc (loc, cond_type, true_value);
6446 if (cond_first_p)
6447 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6448 else
6449 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6451 if (rhs == 0)
6453 false_value = fold_convert_loc (loc, cond_type, false_value);
6454 if (cond_first_p)
6455 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6456 else
6457 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6460 /* Check that we have simplified at least one of the branches. */
6461 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6462 return NULL_TREE;
6464 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
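/* Editorial illustration, not part of fold-const.c: the rewrite above
   in scalar form.  Distributing the operation over the COND_EXPR is
   profitable when at least one arm then folds, as a + 1 and a + 0 do
   here.  */
#include <assert.h>

static int unfolded (int a, int x, int y) { return a + (x < y); }
static int folded   (int a, int x, int y) { return (x < y) ? a + 1 : a + 0; }

int main (void)
{
  assert (unfolded (5, 1, 2) == folded (5, 1, 2));
  assert (unfolded (5, 2, 1) == folded (5, 2, 1));
  return 0;
}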
6468 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6470 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6471 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6472 ADDEND is the same as X.
6474 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6475 and finite. The problematic cases are when X is zero, and its mode
6476 has signed zeros. In the case of rounding towards -infinity,
6477 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6478 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6480 bool
6481 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6483 if (!real_zerop (addend))
6484 return false;
6486 /* Don't allow the fold with -fsignaling-nans. */
6487 if (HONOR_SNANS (element_mode (type)))
6488 return false;
6490 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6491 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6492 return true;
6494 /* In a vector or complex, we would need to check the sign of all zeros. */
6495 if (TREE_CODE (addend) != REAL_CST)
6496 return false;
6498 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6499 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6500 negate = !negate;
6502 /* The mode has signed zeros, and we have to honor their sign.
6503 In this situation, there is only one case we can return true for.
6504 X - 0 is the same as X unless rounding towards -infinity is
6505 supported. */
6506 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
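/* Editorial illustration, not part of fold-const.c: the signed-zero
   hazard described above, under default IEEE semantics (compile
   without -ffast-math).  X + 0.0 is not X when X is -0.0, while
   X - 0.0 is.  */
#include <assert.h>
#include <math.h>

int main (void)
{
  double x = -0.0;
  assert (signbit (x));           /* X is -0.0,                  */
  assert (!signbit (x + 0.0));    /* but X + 0.0 is +0.0,        */
  assert (signbit (x - 0.0));     /* while X - 0.0 stays -0.0.   */
  return 0;
}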
6509 /* Subroutine of fold() that optimizes comparisons of a division by
6510 a nonzero integer constant against an integer constant, i.e.
6511 X/C1 op C2.
6513 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6514 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6515 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6517 The function returns the constant folded tree if a simplification
6518 can be made, and NULL_TREE otherwise. */
6520 static tree
6521 fold_div_compare (location_t loc,
6522 enum tree_code code, tree type, tree arg0, tree arg1)
6524 tree prod, tmp, hi, lo;
6525 tree arg00 = TREE_OPERAND (arg0, 0);
6526 tree arg01 = TREE_OPERAND (arg0, 1);
6527 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6528 bool neg_overflow = false;
6529 bool overflow;
6531 /* We have to do this the hard way to detect unsigned overflow.
6532 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6533 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6534 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6535 neg_overflow = false;
6537 if (sign == UNSIGNED)
6539 tmp = int_const_binop (MINUS_EXPR, arg01,
6540 build_int_cst (TREE_TYPE (arg01), 1));
6541 lo = prod;
6543 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6544 val = wi::add (prod, tmp, sign, &overflow);
6545 hi = force_fit_type (TREE_TYPE (arg00), val,
6546 -1, overflow | TREE_OVERFLOW (prod));
6548 else if (tree_int_cst_sgn (arg01) >= 0)
6550 tmp = int_const_binop (MINUS_EXPR, arg01,
6551 build_int_cst (TREE_TYPE (arg01), 1));
6552 switch (tree_int_cst_sgn (arg1))
6554 case -1:
6555 neg_overflow = true;
6556 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6557 hi = prod;
6558 break;
6560 case 0:
6561 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6562 hi = tmp;
6563 break;
6565 case 1:
6566 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6567 lo = prod;
6568 break;
6570 default:
6571 gcc_unreachable ();
6574 else
6576 /* A negative divisor reverses the relational operators. */
6577 code = swap_tree_comparison (code);
6579 tmp = int_const_binop (PLUS_EXPR, arg01,
6580 build_int_cst (TREE_TYPE (arg01), 1));
6581 switch (tree_int_cst_sgn (arg1))
6583 case -1:
6584 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6585 lo = prod;
6586 break;
6588 case 0:
6589 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6590 lo = tmp;
6591 break;
6593 case 1:
6594 neg_overflow = true;
6595 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6596 hi = prod;
6597 break;
6599 default:
6600 gcc_unreachable ();
6604 switch (code)
6606 case EQ_EXPR:
6607 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6608 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6609 if (TREE_OVERFLOW (hi))
6610 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6611 if (TREE_OVERFLOW (lo))
6612 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6613 return build_range_check (loc, type, arg00, 1, lo, hi);
6615 case NE_EXPR:
6616 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6617 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6618 if (TREE_OVERFLOW (hi))
6619 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6620 if (TREE_OVERFLOW (lo))
6621 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6622 return build_range_check (loc, type, arg00, 0, lo, hi);
6624 case LT_EXPR:
6625 if (TREE_OVERFLOW (lo))
6627 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6628 return omit_one_operand_loc (loc, type, tmp, arg00);
6630 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6632 case LE_EXPR:
6633 if (TREE_OVERFLOW (hi))
6635 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6636 return omit_one_operand_loc (loc, type, tmp, arg00);
6638 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6640 case GT_EXPR:
6641 if (TREE_OVERFLOW (hi))
6643 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6644 return omit_one_operand_loc (loc, type, tmp, arg00);
6646 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6648 case GE_EXPR:
6649 if (TREE_OVERFLOW (lo))
6651 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6652 return omit_one_operand_loc (loc, type, tmp, arg00);
6654 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6656 default:
6657 break;
6660 return NULL_TREE;
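/* Editorial illustration, not part of fold-const.c: the range check the
   function above produces for a sample unsigned division.  X / 4 == 3
   holds exactly for X in [12, 15], i.e. [C1*C2, C1*C2 + C1 - 1].  */
#include <assert.h>

int main (void)
{
  for (unsigned x = 0; x < 64; x++)
    assert ((x / 4 == 3) == (x >= 12 && x <= 15));
  return 0;
}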
6664 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6665 equality/inequality test, then return a simplified form of the test
6666 using a sign test. Otherwise return NULL. TYPE is the desired
6667 result type. */
6669 static tree
6670 fold_single_bit_test_into_sign_test (location_t loc,
6671 enum tree_code code, tree arg0, tree arg1,
6672 tree result_type)
6674 /* If this is testing a single bit, we can optimize the test. */
6675 if ((code == NE_EXPR || code == EQ_EXPR)
6676 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6677 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6679 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6680 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6681 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6683 if (arg00 != NULL_TREE
6684 /* This is only a win if casting to a signed type is cheap,
6685 i.e. when arg00's type is not a partial mode. */
6686 && TYPE_PRECISION (TREE_TYPE (arg00))
6687 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6689 tree stype = signed_type_for (TREE_TYPE (arg00));
6690 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6691 result_type,
6692 fold_convert_loc (loc, stype, arg00),
6693 build_int_cst (stype, 0));
6697 return NULL_TREE;
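/* Editorial illustration, not part of fold-const.c: the sign-test
   rewrite above for a 32-bit int, where the tested bit C is the sign
   bit.  */
#include <assert.h>
#include <stdint.h>

int main (void)
{
  int32_t vals[] = { INT32_MIN, -1, 0, 1, INT32_MAX };
  for (int i = 0; i < 5; i++)
    {
      int32_t a = vals[i];
      assert (((a & INT32_MIN) != 0) == (a < 0));    /* (A & C) != 0 -> A < 0   */
      assert (((a & INT32_MIN) == 0) == (a >= 0));   /* (A & C) == 0 -> A >= 0  */
    }
  return 0;
}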
6700 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6701 equality/inequality test, then return a simplified form of
6702 the test using shifts and logical operations. Otherwise return
6703 NULL. TYPE is the desired result type. */
6705 tree
6706 fold_single_bit_test (location_t loc, enum tree_code code,
6707 tree arg0, tree arg1, tree result_type)
6709 /* If this is testing a single bit, we can optimize the test. */
6710 if ((code == NE_EXPR || code == EQ_EXPR)
6711 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6712 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6714 tree inner = TREE_OPERAND (arg0, 0);
6715 tree type = TREE_TYPE (arg0);
6716 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6717 machine_mode operand_mode = TYPE_MODE (type);
6718 int ops_unsigned;
6719 tree signed_type, unsigned_type, intermediate_type;
6720 tree tem, one;
6722 /* First, see if we can fold the single bit test into a sign-bit
6723 test. */
6724 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6725 result_type);
6726 if (tem)
6727 return tem;
6729 /* Otherwise we have (A & C) != 0 where C is a single bit,
6730 convert that into ((A >> C2) & 1), where C2 = log2 (C).
6731 Similarly for (A & C) == 0. */
6733 /* If INNER is a right shift of a constant and it plus BITNUM does
6734 not overflow, adjust BITNUM and INNER. */
6735 if (TREE_CODE (inner) == RSHIFT_EXPR
6736 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6737 && bitnum < TYPE_PRECISION (type)
6738 && wi::ltu_p (TREE_OPERAND (inner, 1),
6739 TYPE_PRECISION (type) - bitnum))
6741 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6742 inner = TREE_OPERAND (inner, 0);
6745 /* If we are going to be able to omit the AND below, we must do our
6746 operations as unsigned. If we must use the AND, we have a choice.
6747 Normally unsigned is faster, but for some machines signed is. */
6748 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6749 && !flag_syntax_only) ? 0 : 1;
6751 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6752 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6753 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6754 inner = fold_convert_loc (loc, intermediate_type, inner);
6756 if (bitnum != 0)
6757 inner = build2 (RSHIFT_EXPR, intermediate_type,
6758 inner, size_int (bitnum));
6760 one = build_int_cst (intermediate_type, 1);
6762 if (code == EQ_EXPR)
6763 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6765 /* Put the AND last so it can combine with more things. */
6766 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6768 /* Make sure to return the proper type. */
6769 inner = fold_convert_loc (loc, result_type, inner);
6771 return inner;
6773 return NULL_TREE;
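/* Editorial illustration, not part of fold-const.c: the shift form
   produced above for C = 8, so C2 = log2 (C) = 3, including the XOR
   with one used for the EQ_EXPR flavor.  */
#include <assert.h>

int main (void)
{
  for (unsigned a = 0; a < 32; a++)
    {
      assert (((a & 8) != 0) == ((a >> 3) & 1));
      assert (((a & 8) == 0) == (((a >> 3) & 1) ^ 1));
    }
  return 0;
}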
6776 /* Check whether we are allowed to reorder operands arg0 and arg1,
6777 such that the evaluation of arg1 occurs before arg0. */
6779 static bool
6780 reorder_operands_p (const_tree arg0, const_tree arg1)
6782 if (! flag_evaluation_order)
6783 return true;
6784 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6785 return true;
6786 return ! TREE_SIDE_EFFECTS (arg0)
6787 && ! TREE_SIDE_EFFECTS (arg1);
6790 /* Test whether it is preferable to swap two operands, ARG0 and
6791 ARG1, for example because ARG0 is an integer constant and ARG1
6792 isn't. If REORDER is true, only recommend swapping if we can
6793 evaluate the operands in reverse order. */
6795 bool
6796 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6798 if (CONSTANT_CLASS_P (arg1))
6799 return 0;
6800 if (CONSTANT_CLASS_P (arg0))
6801 return 1;
6803 STRIP_NOPS (arg0);
6804 STRIP_NOPS (arg1);
6806 if (TREE_CONSTANT (arg1))
6807 return 0;
6808 if (TREE_CONSTANT (arg0))
6809 return 1;
6811 if (reorder && flag_evaluation_order
6812 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6813 return 0;
6815 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6816 for commutative and comparison operators. Ensuring a canonical
6817 form allows the optimizers to find additional redundancies without
6818 having to explicitly check for both orderings. */
6819 if (TREE_CODE (arg0) == SSA_NAME
6820 && TREE_CODE (arg1) == SSA_NAME
6821 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6822 return 1;
6824 /* Put SSA_NAMEs last. */
6825 if (TREE_CODE (arg1) == SSA_NAME)
6826 return 0;
6827 if (TREE_CODE (arg0) == SSA_NAME)
6828 return 1;
6830 /* Put variables last. */
6831 if (DECL_P (arg1))
6832 return 0;
6833 if (DECL_P (arg0))
6834 return 1;
6836 return 0;
6840 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6841 means A >= Y && A != MAX, but in this case we know that
6842 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6844 static tree
6845 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6847 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6849 if (TREE_CODE (bound) == LT_EXPR)
6850 a = TREE_OPERAND (bound, 0);
6851 else if (TREE_CODE (bound) == GT_EXPR)
6852 a = TREE_OPERAND (bound, 1);
6853 else
6854 return NULL_TREE;
6856 typea = TREE_TYPE (a);
6857 if (!INTEGRAL_TYPE_P (typea)
6858 && !POINTER_TYPE_P (typea))
6859 return NULL_TREE;
6861 if (TREE_CODE (ineq) == LT_EXPR)
6863 a1 = TREE_OPERAND (ineq, 1);
6864 y = TREE_OPERAND (ineq, 0);
6866 else if (TREE_CODE (ineq) == GT_EXPR)
6868 a1 = TREE_OPERAND (ineq, 0);
6869 y = TREE_OPERAND (ineq, 1);
6871 else
6872 return NULL_TREE;
6874 if (TREE_TYPE (a1) != typea)
6875 return NULL_TREE;
6877 if (POINTER_TYPE_P (typea))
6879 /* Convert the pointer types into integer before taking the difference. */
6880 tree ta = fold_convert_loc (loc, ssizetype, a);
6881 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6882 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6884 else
6885 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6887 if (!diff || !integer_onep (diff))
6888 return NULL_TREE;
6890 return fold_build2_loc (loc, GE_EXPR, type, a, y);
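/* Editorial illustration, not part of fold-const.c: the fold above on a
   small signed range.  For integers, A + 1 > Y is A >= Y whenever
   A + 1 does not overflow, which the A < X bound guarantees in the
   tree-level version.  */
#include <assert.h>

int main (void)
{
  for (int a = -4; a <= 4; a++)
    for (int x = -4; x <= 4; x++)
      for (int y = -4; y <= 4; y++)
        assert ((a < x && a + 1 > y) == (a < x && a >= y));
  return 0;
}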
6893 /* Fold a sum or difference of at least one multiplication.
6894 Returns the folded tree or NULL if no simplification could be made. */
6896 static tree
6897 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6898 tree arg0, tree arg1)
6900 tree arg00, arg01, arg10, arg11;
6901 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6903 /* (A * C) +- (B * C) -> (A+-B) * C.
6904 (A * C) +- A -> A * (C+-1).
6905 We are most concerned about the case where C is a constant,
6906 but other combinations show up during loop reduction. Since
6907 it is not difficult, try all four possibilities. */
6909 if (TREE_CODE (arg0) == MULT_EXPR)
6911 arg00 = TREE_OPERAND (arg0, 0);
6912 arg01 = TREE_OPERAND (arg0, 1);
6914 else if (TREE_CODE (arg0) == INTEGER_CST)
6916 arg00 = build_one_cst (type);
6917 arg01 = arg0;
6919 else
6921 /* We cannot generate constant 1 for fract. */
6922 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6923 return NULL_TREE;
6924 arg00 = arg0;
6925 arg01 = build_one_cst (type);
6927 if (TREE_CODE (arg1) == MULT_EXPR)
6929 arg10 = TREE_OPERAND (arg1, 0);
6930 arg11 = TREE_OPERAND (arg1, 1);
6932 else if (TREE_CODE (arg1) == INTEGER_CST)
6934 arg10 = build_one_cst (type);
6935 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6936 the purpose of this canonicalization. */
6937 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6938 && negate_expr_p (arg1)
6939 && code == PLUS_EXPR)
6941 arg11 = negate_expr (arg1);
6942 code = MINUS_EXPR;
6944 else
6945 arg11 = arg1;
6947 else
6949 /* We cannot generate constant 1 for fract. */
6950 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6951 return NULL_TREE;
6952 arg10 = arg1;
6953 arg11 = build_one_cst (type);
6955 same = NULL_TREE;
6957 if (operand_equal_p (arg01, arg11, 0))
6958 same = arg01, alt0 = arg00, alt1 = arg10;
6959 else if (operand_equal_p (arg00, arg10, 0))
6960 same = arg00, alt0 = arg01, alt1 = arg11;
6961 else if (operand_equal_p (arg00, arg11, 0))
6962 same = arg00, alt0 = arg01, alt1 = arg10;
6963 else if (operand_equal_p (arg01, arg10, 0))
6964 same = arg01, alt0 = arg00, alt1 = arg11;
6966 /* No identical multiplicands; see if we can find a common
6967 power-of-two factor in non-power-of-two multiplies. This
6968 can help in multi-dimensional array access. */
6969 else if (tree_fits_shwi_p (arg01)
6970 && tree_fits_shwi_p (arg11))
6972 HOST_WIDE_INT int01, int11, tmp;
6973 bool swap = false;
6974 tree maybe_same;
6975 int01 = tree_to_shwi (arg01);
6976 int11 = tree_to_shwi (arg11);
6978 /* Move min of absolute values to int11. */
6979 if (absu_hwi (int01) < absu_hwi (int11))
6981 tmp = int01, int01 = int11, int11 = tmp;
6982 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6983 maybe_same = arg01;
6984 swap = true;
6986 else
6987 maybe_same = arg11;
6989 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6990 /* The remainder should not be a constant, otherwise we
6991 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6992 increased the number of multiplications necessary. */
6993 && TREE_CODE (arg10) != INTEGER_CST)
6995 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6996 build_int_cst (TREE_TYPE (arg00),
6997 int01 / int11));
6998 alt1 = arg10;
6999 same = maybe_same;
7000 if (swap)
7001 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7005 if (same)
7006 return fold_build2_loc (loc, MULT_EXPR, type,
7007 fold_build2_loc (loc, code, type,
7008 fold_convert_loc (loc, type, alt0),
7009 fold_convert_loc (loc, type, alt1)),
7010 fold_convert_loc (loc, type, same));
7012 return NULL_TREE;
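/* Editorial illustration, not part of fold-const.c: the two factorings
   above.  The second is the power-of-two case that helps
   multi-dimensional array indexing: 4 divides 12, so A*12 + B*4
   becomes (A*3 + B) * 4.  */
#include <assert.h>

int main (void)
{
  for (int a = -5; a <= 5; a++)
    for (int b = -5; b <= 5; b++)
      {
        assert (a * 7 + b * 7 == (a + b) * 7);       /* (A*C) +- (B*C) -> (A+-B) * C  */
        assert (a * 12 + b * 4 == (a * 3 + b) * 4);  /* common power-of-two factor    */
      }
  return 0;
}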
7015 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7016 specified by EXPR into the buffer PTR of length LEN bytes.
7017 Return the number of bytes placed in the buffer, or zero
7018 upon failure. */
7020 static int
7021 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7023 tree type = TREE_TYPE (expr);
7024 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7025 int byte, offset, word, words;
7026 unsigned char value;
7028 if ((off == -1 && total_bytes > len)
7029 || off >= total_bytes)
7030 return 0;
7031 if (off == -1)
7032 off = 0;
7033 words = total_bytes / UNITS_PER_WORD;
7035 for (byte = 0; byte < total_bytes; byte++)
7037 int bitpos = byte * BITS_PER_UNIT;
7038 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7039 number of bytes. */
7040 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7042 if (total_bytes > UNITS_PER_WORD)
7044 word = byte / UNITS_PER_WORD;
7045 if (WORDS_BIG_ENDIAN)
7046 word = (words - 1) - word;
7047 offset = word * UNITS_PER_WORD;
7048 if (BYTES_BIG_ENDIAN)
7049 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7050 else
7051 offset += byte % UNITS_PER_WORD;
7053 else
7054 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7055 if (offset >= off
7056 && offset - off < len)
7057 ptr[offset - off] = value;
7059 return MIN (len, total_bytes - off);
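/* Editorial illustration, not part of fold-const.c: the byte order the
   loop above produces for a 32-bit constant when the host is
   little-endian and UNITS_PER_WORD >= 4; each output byte is just the
   8-bit slice of the value at that bit position.  */
#include <assert.h>
#include <stdint.h>

int main (void)
{
  uint32_t v = 0x11223344;
  unsigned char buf[4];
  for (int byte = 0; byte < 4; byte++)
    buf[byte] = (v >> (byte * 8)) & 0xff;   /* wi::extract_uhwi analogue.  */
  assert (buf[0] == 0x44 && buf[3] == 0x11);
  return 0;
}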
7063 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7064 specified by EXPR into the buffer PTR of length LEN bytes.
7065 Return the number of bytes placed in the buffer, or zero
7066 upon failure. */
7068 static int
7069 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7071 tree type = TREE_TYPE (expr);
7072 machine_mode mode = TYPE_MODE (type);
7073 int total_bytes = GET_MODE_SIZE (mode);
7074 FIXED_VALUE_TYPE value;
7075 tree i_value, i_type;
7077 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7078 return 0;
7080 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7082 if (NULL_TREE == i_type
7083 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7084 return 0;
7086 value = TREE_FIXED_CST (expr);
7087 i_value = double_int_to_tree (i_type, value.data);
7089 return native_encode_int (i_value, ptr, len, off);
7093 /* Subroutine of native_encode_expr. Encode the REAL_CST
7094 specified by EXPR into the buffer PTR of length LEN bytes.
7095 Return the number of bytes placed in the buffer, or zero
7096 upon failure. */
7098 static int
7099 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7101 tree type = TREE_TYPE (expr);
7102 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7103 int byte, offset, word, words, bitpos;
7104 unsigned char value;
7106 /* There are always 32 bits in each long, no matter the size of
7107 the host's long. We handle floating point representations with
7108 up to 192 bits. */
7109 long tmp[6];
7111 if ((off == -1 && total_bytes > len)
7112 || off >= total_bytes)
7113 return 0;
7114 if (off == -1)
7115 off = 0;
7116 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7118 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7120 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7121 bitpos += BITS_PER_UNIT)
7123 byte = (bitpos / BITS_PER_UNIT) & 3;
7124 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7126 if (UNITS_PER_WORD < 4)
7128 word = byte / UNITS_PER_WORD;
7129 if (WORDS_BIG_ENDIAN)
7130 word = (words - 1) - word;
7131 offset = word * UNITS_PER_WORD;
7132 if (BYTES_BIG_ENDIAN)
7133 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7134 else
7135 offset += byte % UNITS_PER_WORD;
7137 else
7138 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7139 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7140 if (offset >= off
7141 && offset - off < len)
7142 ptr[offset - off] = value;
7144 return MIN (len, total_bytes - off);
7147 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7148 specified by EXPR into the buffer PTR of length LEN bytes.
7149 Return the number of bytes placed in the buffer, or zero
7150 upon failure. */
7152 static int
7153 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7155 int rsize, isize;
7156 tree part;
7158 part = TREE_REALPART (expr);
7159 rsize = native_encode_expr (part, ptr, len, off);
7160 if (off == -1
7161 && rsize == 0)
7162 return 0;
7163 part = TREE_IMAGPART (expr);
7164 if (off != -1)
7165 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7166 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7167 if (off == -1
7168 && isize != rsize)
7169 return 0;
7170 return rsize + isize;
7174 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7175 specified by EXPR into the buffer PTR of length LEN bytes.
7176 Return the number of bytes placed in the buffer, or zero
7177 upon failure. */
7179 static int
7180 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7182 unsigned i, count;
7183 int size, offset;
7184 tree itype, elem;
7186 offset = 0;
7187 count = VECTOR_CST_NELTS (expr);
7188 itype = TREE_TYPE (TREE_TYPE (expr));
7189 size = GET_MODE_SIZE (TYPE_MODE (itype));
7190 for (i = 0; i < count; i++)
7192 if (off >= size)
7194 off -= size;
7195 continue;
7197 elem = VECTOR_CST_ELT (expr, i);
7198 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7199 if ((off == -1 && res != size)
7200 || res == 0)
7201 return 0;
7202 offset += res;
7203 if (offset >= len)
7204 return offset;
7205 if (off != -1)
7206 off = 0;
7208 return offset;
7212 /* Subroutine of native_encode_expr. Encode the STRING_CST
7213 specified by EXPR into the buffer PTR of length LEN bytes.
7214 Return the number of bytes placed in the buffer, or zero
7215 upon failure. */
7217 static int
7218 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7220 tree type = TREE_TYPE (expr);
7221 HOST_WIDE_INT total_bytes;
7223 if (TREE_CODE (type) != ARRAY_TYPE
7224 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7225 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7226 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7227 return 0;
7228 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7229 if ((off == -1 && total_bytes > len)
7230 || off >= total_bytes)
7231 return 0;
7232 if (off == -1)
7233 off = 0;
7234 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7236 int written = 0;
7237 if (off < TREE_STRING_LENGTH (expr))
7239 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7240 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7242 memset (ptr + written, 0,
7243 MIN (total_bytes - written, len - written));
7245 else
7246 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7247 return MIN (total_bytes - off, len);
7251 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7252 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7253 buffer PTR of length LEN bytes. If OFF is not -1 then start
7254 the encoding at byte offset OFF and encode at most LEN bytes.
7255 Return the number of bytes placed in the buffer, or zero upon failure. */
7257 int
7258 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7260 /* We don't support starting at negative offset and -1 is special. */
7261 if (off < -1)
7262 return 0;
7264 switch (TREE_CODE (expr))
7266 case INTEGER_CST:
7267 return native_encode_int (expr, ptr, len, off);
7269 case REAL_CST:
7270 return native_encode_real (expr, ptr, len, off);
7272 case FIXED_CST:
7273 return native_encode_fixed (expr, ptr, len, off);
7275 case COMPLEX_CST:
7276 return native_encode_complex (expr, ptr, len, off);
7278 case VECTOR_CST:
7279 return native_encode_vector (expr, ptr, len, off);
7281 case STRING_CST:
7282 return native_encode_string (expr, ptr, len, off);
7284 default:
7285 return 0;
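
/* Illustrative sketch, not part of GCC: how a caller might use
   native_encode_expr to obtain the target byte image of a constant.
   Here `int_type' stands for any 32-bit integral type the caller has
   already built; the concrete byte values assume a little-endian
   target with 8-bit units.

     unsigned char buf[4];
     tree cst = build_int_cst (int_type, 0x11223344);
     int n = native_encode_expr (cst, buf, sizeof buf, -1);
     if (n == 4)
       ;  // buf now holds { 0x44, 0x33, 0x22, 0x11 }.
*/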

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  wide_int result = wi::from_buffer (ptr, total_bytes);

  return wide_int_to_tree (type, result);
}

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;
  FIXED_VALUE_TYPE fixed_value;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);
  fixed_value = fixed_from_double_int (result, TYPE_MODE (type));

  return build_fixed (type, fixed_value);
}

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      /* Both OFFSET and BYTE index within a long;
         bitpos indexes the whole float.  */
      int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
        {
          int word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        {
          offset = byte;
          if (BYTES_BIG_ENDIAN)
            {
              /* Reverse bytes within each long, or within the entire float
                 if it's smaller than a long (for HFmode).  */
              offset = MIN (3, total_bytes - 1) - offset;
              gcc_assert (offset >= 0);
            }
        }
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
        return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}

/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case FIXED_POINT_TYPE:
      return native_interpret_fixed (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}

/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FIXED_POINT_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}

/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
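
/* Illustrative sketch, not part of GCC: folding
   VIEW_CONVERT_EXPR<unsigned int>(1.0f) at compile time via the
   encode/interpret round-trip above.  Assuming an IEEE
   single-precision target, the result is the INTEGER_CST 0x3f800000,
   i.e. the bit pattern of 1.0f.

     tree f = build_real (float_type_node, dconst1);
     tree i = fold_view_convert_expr (unsigned_type_node, f);
*/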

/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
        t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
           && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
           && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
                        TREE_OPERAND (t, 0),
                        convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
        t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}

/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}

/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
          || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type
             matters.  */
          STRIP_SIGN_NOPS (arg0);
        }
      else
        {
          /* Strip any conversions that don't change the mode.  This
             is safe for every expression, except for a comparison
             expression because its signedness is derived from its
             operands.

             Note that this is done as an internal manipulation within
             the constant folder, in order to find the simplest
             representation of the arguments so that their form can be
             studied.  In any cases, the appropriate type conversions
             should be put back in the tree that will get out of the
             constant folder.  */
          STRIP_NOPS (arg0);
        }

      if (CONSTANT_CLASS_P (arg0))
        {
          tree tem = const_unop (code, type, arg0);
          if (tem)
            {
              if (TREE_TYPE (tem) != type)
                tem = fold_convert_loc (loc, type, tem);
              return tem;
            }
        }
    }

  tem = generic_simplify (loc, code, type, op0);
  if (tem)
    return tem;

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build1_loc (loc, code, type,
                                        fold_convert_loc (loc, TREE_TYPE (op0),
                                                          TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold_build1_loc (loc, code, type,
                                     fold_convert_loc (loc,
                                                       TREE_TYPE (op0), arg01));
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold_build1_loc (loc, code, type,
                                     fold_convert_loc (loc,
                                                       TREE_TYPE (op0), arg02));
          tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
                                 arg01, arg02);

          /* If this was a conversion, and all we did was to move into
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((CONVERT_EXPR_CODE_P (code)
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                     && (INTEGRAL_TYPE_P
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
                  || flag_syntax_only))
            tem = build1_loc (loc, code, type,
                              build3 (COND_EXPR,
                                      TREE_TYPE (TREE_OPERAND
                                                 (TREE_OPERAND (tem, 1), 0)),
                                      TREE_OPERAND (tem, 0),
                                      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                      TREE_OPERAND (TREE_OPERAND (tem, 2),
                                                    0)));
          return tem;
        }
    }

  switch (code)
    {
    case NON_LVALUE_EXPR:
      if (!maybe_lvalue_p (op0))
        return fold_convert_loc (loc, type, op0);
      return NULL_TREE;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      if (COMPARISON_CLASS_P (op0))
        {
          /* If we have (type) (a CMP b) and type is an integral type, return
             new expression involving the new type.  Canonicalize
             (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
             non-integral type.
             Do not fold the result as that would not simplify further, also
             folding again results in recursions.  */
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            return build2_loc (loc, TREE_CODE (op0), type,
                               TREE_OPERAND (op0, 0),
                               TREE_OPERAND (op0, 1));
          else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
                   && TREE_CODE (type) != VECTOR_TYPE)
            return build3_loc (loc, COND_EXPR, type, op0,
                               constant_boolean_node (true, type),
                               constant_boolean_node (false, type));
        }

      /* Handle (T *)&A.B.C for A being of type T and B and C
         living at offset zero.  This occurs frequently in
         C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && POINTER_TYPE_P (type)
          && handled_component_p (TREE_OPERAND (op0, 0)))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          machine_mode mode;
          int unsignedp, reversep, volatilep;
          tree base
            = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
                                   &offset, &mode, &unsignedp, &reversep,
                                   &volatilep, false);
          /* If the reference was to a (constant) zero offset, we can use
             the address of the base if it has the same base type
             as the result type and the pointer type is unqualified.  */
          if (! offset && bitpos == 0
              && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
                  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
              && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
            return fold_convert_loc (loc, type,
                                     build_fold_addr_expr_loc (loc, base));
        }

      if (TREE_CODE (op0) == MODIFY_EXPR
          && TREE_CONSTANT (TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD
                  (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          return tem;
        }

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constant (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.
         ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
         very likely don't have maximal range for their precision and this
         transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          tree and_expr = op0;
          tree and0 = TREE_OPERAND (and_expr, 0);
          tree and1 = TREE_OPERAND (and_expr, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && tree_fits_uhwi_p (and1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_to_uhwi (and1);
              cst &= HOST_WIDE_INT_M1U
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
              if (change
                  && !flag_syntax_only
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = unsigned_type_for (TREE_TYPE (and0));
                  and0 = fold_convert_loc (loc, uns, and0);
                  and1 = fold_convert_loc (loc, uns, and1);
                }
            }
          if (change)
            {
              tem = force_fit_type (type, wi::to_widest (and1), 0,
                                    TREE_OVERFLOW (and1));
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_convert_loc (loc, type, and0), tem);
            }
        }

      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
         cast (T1)X will fold away.  We assume that this happens when X itself
         is a cast.  */
      if (POINTER_TYPE_P (type)
          && TREE_CODE (arg0) == POINTER_PLUS_EXPR
          && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);

          return fold_build_pointer_plus_loc
                   (loc, fold_convert_loc (loc, type, arg00), arg01);
        }

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
         of the same precision, and X is an integer type not narrower than
         types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == BIT_NOT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        {
          tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
          if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
              && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
            return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                    fold_convert_loc (loc, type, tem));
        }

      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
         type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == MULT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
        {
          /* Be careful not to introduce new overflows.  */
          tree mult_type;
          if (TYPE_OVERFLOW_WRAPS (type))
            mult_type = type;
          else
            mult_type = unsigned_type_for (type);

          if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
            {
              tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
                                     fold_convert_loc (loc, mult_type,
                                                       TREE_OPERAND (op0, 0)),
                                     fold_convert_loc (loc, mult_type,
                                                       TREE_OPERAND (op0, 1)));
              return fold_convert_loc (loc, type, tem);
            }
        }

      return NULL_TREE;
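
      /* Worked example for the (T1)(X * Y) narrowing fold above
         (illustrative, not part of GCC): with T1 = unsigned short and
         X, Y of type int,
             (unsigned short) (X * Y)
         becomes
             (unsigned short) ((unsigned short) X * (unsigned short) Y).
         Only the low 16 bits of the product matter, and the narrower
         multiply is performed in an unsigned (wrapping) type, so no
         new undefined overflow is introduced.  */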

    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (op0) == MEM_REF)
        {
          tem = fold_build2_loc (loc, MEM_REF, type,
                                 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
          REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
          return tem;
        }

      return NULL_TREE;

    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
        return fold_convert_loc (loc, type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      if (TREE_CODE (arg0) == NOP_EXPR
          && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert_loc (loc, type,
                                     fold_build1_loc (loc, ABS_EXPR,
                                                      TREE_TYPE (targ0),
                                                      targ0));
        }
      return NULL_TREE;

    case BIT_NOT_EXPR:
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                         fold_convert_loc (loc, type,
                                                           TREE_OPERAND (arg0, 1)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)), tem);

      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
        return NULL_TREE;
      return fold_convert_loc (loc, type, tem);

    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
        {
          tree op00 = TREE_OPERAND (op0, 0);
          if ((TREE_CODE (op00) == VAR_DECL
               || TREE_CODE (op00) == PARM_DECL
               || TREE_CODE (op00) == RESULT_DECL)
              && !TREE_READONLY (op00))
            return op00;
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}

/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */
tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
                                tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}

/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
                  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
          || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
          || TREE_CODE (arg0) == TRUTH_AND_EXPR
          || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                          || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                         && (code == TRUTH_AND_EXPR
                             || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
                                fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
         operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
               && operand_equal_p (a01, a11, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type, a00, a10),
                                a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
        return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
        return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
                                         TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
          || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR
          || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
              ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
         or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
         We don't want to pack more than two leafs to a non-IF AND/OR
         expression.
         If the tree code of the left-hand operand isn't an AND/OR-IF code
         and isn't equal to IF-CODE, then we don't want to add the
         right-hand operand.  If the inner right-hand side of the
         left-hand operand has side-effects, or isn't simple, then we
         can't add to it, as otherwise we might destroy the if-sequence.  */
      if (TREE_CODE (arg0) == icode
          && simple_operand_p_2 (arg1)
          /* Needed for sequence points to handle trappings, and
             side-effects.  */
          && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
                                 arg1);
          return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
                                  tem);
        }
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
         or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
               && simple_operand_p_2 (arg0)
               /* Needed for sequence points to handle trappings, and
                  side-effects.  */
               && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, ncode, type,
                                 arg0, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, icode, type, tem,
                                  TREE_OPERAND (arg1, 1));
        }
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
         into (A OR B).
         For sequence point consistency, we need to check for trapping,
         and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
               && simple_operand_p_2 (arg1))
        return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
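
/* Illustrative sketch, not part of GCC: on a target where
   LOGICAL_OP_NON_SHORT_CIRCUIT is true, the packing above turns

     ((a && b) && c)   into   (a && (b & c))

   in tree terms, (A AND-IF B) AND-IF C becomes A AND-IF (B AND C),
   provided b and c are simple operands that cannot trap, so evaluating
   c unconditionally is safe and one conditional branch is removed.  */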

/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
                                 tree arg0, tree arg1,
                                 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;

  /* Match A +- CST code arg1.  We can change this only if overflow
     is undefined.  */
  if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
         && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
        /* In principle pointers also have undefined overflow behavior,
           but that causes problems elsewhere.  */
        && !POINTER_TYPE_P (TREE_TYPE (arg0))
        && (code0 == MINUS_EXPR
            || code0 == PLUS_EXPR)
        && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
  if (code == LT_EXPR
      && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
    code = LE_EXPR;
  /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
  else if (code == GT_EXPR
           && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
    code = GE_EXPR;
  /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
  else if (code == LE_EXPR
           && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
    code = LT_EXPR;
  /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
  else if (code == GE_EXPR
           && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
    code = GT_EXPR;
  else
    return NULL_TREE;
  *strict_overflow_p = true;

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
           && TYPE_MIN_VALUE (TREE_TYPE (cst0))
           && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
          || (sgn0 == -1
              && TYPE_MAX_VALUE (TREE_TYPE (cst0))
              && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
                       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  return fold_build2_loc (loc, code, type, t, arg1);
}
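
/* Worked example (illustrative, not part of GCC): for signed int x,
   the comparison  x - 5 < y  is canonicalized to  x - 4 <= y.
   Both describe the same set of values (x - 5 < y iff x - 5 <= y - 1
   iff x - 4 <= y), but the constant has moved one step closer to
   zero.  This is valid only because signed overflow is undefined,
   which is why *STRICT_OVERFLOW_P is set above.  */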

/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
                               tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
                                       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
                                       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}

/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which cannot wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  wide_int wi_offset;
  int precision = TYPE_PRECISION (TREE_TYPE (base));
  if (offset == NULL_TREE)
    wi_offset = wi::zero (precision);
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    wi_offset = offset;

  bool overflow;
  wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
  wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
  if (overflow)
    return true;

  if (!wi::fits_uhwi_p (total))
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
        size = base_size;
    }

  return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
}

/* Return the HOST_WIDE_INT least significant bits of T, a sizetype
   kind INTEGER_CST.  This makes sure to properly sign-extend the
   constant.  */

static HOST_WIDE_INT
size_low_cst (const_tree t)
{
  HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
  int prec = TYPE_PRECISION (TREE_TYPE (t));
  if (prec < HOST_BITS_PER_WIDE_INT)
    return sext_hwi (w, prec);
  return w;
}
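
/* Worked example (illustrative, not part of GCC): with a 32-bit
   sizetype on a host where HOST_WIDE_INT is 64 bits, the constant
   0xFFFFFFFF has low element 0xFFFFFFFF, and sext_hwi (0xFFFFFFFF, 32)
   yields -1 -- the value that offset actually denotes once
   sign-extended, rather than the large positive 0xFFFFFFFF.  */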

/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (equality_code
          || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      const enum tree_code
        reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
      tree variable = TREE_OPERAND (arg0, 0);
      tree new_const = int_const_binop (reverse_op, const2, const1);

      /* If the constant operation overflowed this can be
         simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_OVERFLOW (new_const)
          && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
        {
          int const1_sgn = tree_int_cst_sgn (const1);
          enum tree_code code2 = code;

          /* Get the sign of the constant on the lhs if the
             operation were VARIABLE + CONST1.  */
          if (TREE_CODE (arg0) == MINUS_EXPR)
            const1_sgn = -const1_sgn;

          /* The sign of the constant determines if we overflowed
             INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
             Canonicalize to the INT_MIN overflow by swapping the comparison
             if necessary.  */
          if (const1_sgn == -1)
            code2 = swap_tree_comparison (code);

          /* We now can look at the canonicalized case
               VARIABLE + 1  CODE2  INT_MIN
             and decide on the result.  */
          switch (code2)
            {
            case EQ_EXPR:
            case LT_EXPR:
            case LE_EXPR:
              return
                omit_one_operand_loc (loc, type, boolean_false_node, variable);

            case NE_EXPR:
            case GE_EXPR:
            case GT_EXPR:
              return
                omit_one_operand_loc (loc, type, boolean_true_node, variable);

            default:
              gcc_unreachable ();
            }
        }
      else
        {
          if (!equality_code)
            fold_overflow_warning ("assuming signed overflow does not occur "
                                   "when changing X +- C1 cmp C2 to "
                                   "X cmp C2 -+ C1",
                                   WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type, variable, new_const);
        }
    }

  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
          || TREE_CODE (arg1) == ADDR_EXPR
          || TREE_CODE (arg0) == POINTER_PLUS_EXPR
          || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      machine_mode mode;
      int volatilep, reversep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
         get_inner_reference, but put it back by stripping INDIRECT_REF
         off the base object if possible.  indirect_baseN will be true
         if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          base0
            = get_inner_reference (TREE_OPERAND (arg0, 0),
                                   &bitsize, &bitpos0, &offset0, &mode,
                                   &unsignedp, &reversep, &volatilep, false);
          if (TREE_CODE (base0) == INDIRECT_REF)
            base0 = TREE_OPERAND (base0, 0);
          else
            indirect_base0 = true;
        }
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          base0 = TREE_OPERAND (arg0, 0);
          STRIP_SIGN_NOPS (base0);
          if (TREE_CODE (base0) == ADDR_EXPR)
            {
              base0 = TREE_OPERAND (base0, 0);
              indirect_base0 = true;
            }
          offset0 = TREE_OPERAND (arg0, 1);
          if (tree_fits_shwi_p (offset0))
            {
              HOST_WIDE_INT off = size_low_cst (offset0);
              if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
                                   * BITS_PER_UNIT)
                  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
                {
                  bitpos0 = off * BITS_PER_UNIT;
                  offset0 = NULL_TREE;
                }
            }
        }

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
        {
          base1
            = get_inner_reference (TREE_OPERAND (arg1, 0),
                                   &bitsize, &bitpos1, &offset1, &mode,
                                   &unsignedp, &reversep, &volatilep, false);
          if (TREE_CODE (base1) == INDIRECT_REF)
            base1 = TREE_OPERAND (base1, 0);
          else
            indirect_base1 = true;
        }
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
        {
          base1 = TREE_OPERAND (arg1, 0);
          STRIP_SIGN_NOPS (base1);
          if (TREE_CODE (base1) == ADDR_EXPR)
            {
              base1 = TREE_OPERAND (base1, 0);
              indirect_base1 = true;
            }
          offset1 = TREE_OPERAND (arg1, 1);
          if (tree_fits_shwi_p (offset1))
            {
              HOST_WIDE_INT off = size_low_cst (offset1);
              if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
                                   * BITS_PER_UNIT)
                  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
                {
                  bitpos1 = off * BITS_PER_UNIT;
                  offset1 = NULL_TREE;
                }
            }
        }

      /* If we have equivalent bases we might be able to simplify.  */
      if (indirect_base0 == indirect_base1
          && operand_equal_p (base0, base1,
                              indirect_base0 ? OEP_ADDRESS_OF : 0))
        {
          /* We can fold this expression to a constant if the non-constant
             offset parts are equal.  */
          if ((offset0 == offset1
               || (offset0 && offset1
                   && operand_equal_p (offset0, offset1, 0)))
              && (code == EQ_EXPR
                  || code == NE_EXPR
                  || (indirect_base0 && DECL_P (base0))
                  || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              if (!equality_code
                  && bitpos0 != bitpos1
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_CONDITIONAL);

              switch (code)
                {
                case EQ_EXPR:
                  return constant_boolean_node (bitpos0 == bitpos1, type);
                case NE_EXPR:
                  return constant_boolean_node (bitpos0 != bitpos1, type);
                case LT_EXPR:
                  return constant_boolean_node (bitpos0 < bitpos1, type);
                case LE_EXPR:
                  return constant_boolean_node (bitpos0 <= bitpos1, type);
                case GE_EXPR:
                  return constant_boolean_node (bitpos0 >= bitpos1, type);
                case GT_EXPR:
                  return constant_boolean_node (bitpos0 > bitpos1, type);
                default:;
                }
            }
          /* We can simplify the comparison to a comparison of the variable
             offset parts if the constant offset parts are equal.
             Be careful to use signed sizetype here because otherwise we
             mess with array offsets in the wrong way.  This is possible
             because pointer arithmetic is restricted to remain within an
             object and overflow on pointer differences is undefined as of
             6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
          else if (bitpos0 == bitpos1
                   && (equality_code
                       || (indirect_base0 && DECL_P (base0))
                       || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              /* By converting to signed sizetype we cover middle-end pointer
                 arithmetic which operates on unsigned pointer types of size
                 type size and ARRAY_REF offsets which are properly sign or
                 zero extended from their type in case it is narrower than
                 sizetype.  */
              if (offset0 == NULL_TREE)
                offset0 = build_int_cst (ssizetype, 0);
              else
                offset0 = fold_convert_loc (loc, ssizetype, offset0);
              if (offset1 == NULL_TREE)
                offset1 = build_int_cst (ssizetype, 0);
              else
                offset1 = fold_convert_loc (loc, ssizetype, offset1);

              if (!equality_code
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_COMPARISON);

              return fold_build2_loc (loc, code, type, offset0, offset1);
            }
          /* For equal offsets we can simplify to a comparison of the
             base addresses.  */
          else if (bitpos0 == bitpos1
                   && (indirect_base0
                       ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
                   && (indirect_base1
                       ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
                   && ((offset0 == offset1)
                       || (offset0 && offset1
                           && operand_equal_p (offset0, offset1, 0))))
            {
              if (indirect_base0)
                base0 = build_fold_addr_expr_loc (loc, base0);
              if (indirect_base1)
                base1 = build_fold_addr_expr_loc (loc, base1);
              return fold_build2_loc (loc, code, type, base0, base1);
            }
        }
    }

  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one and has the same sign.  */
  if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
                                      "occur when combining constants around "
                                      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
         of lower absolute value and of the same sign as before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const2, const1);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
          && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type,
                                  variable1,
                                  fold_build2_loc (loc, TREE_CODE (arg1),
                                                   TREE_TYPE (arg1),
                                                   variable2, cst));
        }

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const1, const2);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
          && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type,
                                  fold_build2_loc (loc, TREE_CODE (arg0),
                                                   TREE_TYPE (arg0),
                                                   variable1, cst),
                                  variable2);
        }
    }

  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
          || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
        return tem;
    }

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
          /* Don't handle degenerate cases here; they should already
             have been handled anyway.  */
          && cval1 != 0 && cval2 != 0
          && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
          && TREE_TYPE (cval1) == TREE_TYPE (cval2)
          && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval2))
          && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
        {
          tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
          tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

          /* We can't just pass T to eval_subst in case cval1 or cval2
             was the same as ARG1.  */

          tree high_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, maxval,
                                           cval2, minval),
                               arg1);
          tree equal_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, maxval,
                                           cval2, maxval),
                               arg1);
          tree low_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, minval,
                                           cval2, maxval),
                               arg1);

          /* All three of these results should be 0 or 1.  Confirm they are.
             Then use those values to select the proper code to use.  */

          if (TREE_CODE (high_result) == INTEGER_CST
              && TREE_CODE (equal_result) == INTEGER_CST
              && TREE_CODE (low_result) == INTEGER_CST)
            {
              /* Make a 3-bit mask with the high-order bit being the
                 value for `>', the next for '=', and the low for '<'.  */
              switch ((integer_onep (high_result) * 4)
                      + (integer_onep (equal_result) * 2)
                      + integer_onep (low_result))
                {
                case 0:
                  /* Always false.  */
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
                case 1:
                  code = LT_EXPR;
                  break;
                case 2:
                  code = EQ_EXPR;
                  break;
                case 3:
                  code = LE_EXPR;
                  break;
                case 4:
                  code = GT_EXPR;
                  break;
                case 5:
                  code = NE_EXPR;
                  break;
                case 6:
                  code = GE_EXPR;
                  break;
                case 7:
                  /* Always true.  */
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
                }

              if (save_p)
                {
                  tem = save_expr (build2 (code, type, cval1, cval2));
                  SET_EXPR_LOCATION (tem, loc);
                  return tem;
                }
              return fold_build2_loc (loc, code, type, cval1, cval2);
            }
        }
    }

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
        return tem;
    }

  return NULL_TREE;
}

/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
                         fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
                         fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
                          build_zero_cst (itype));
}
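
/* The identity being exploited above, for z = a + bi:

     z * conj(z) = (a + bi)(a - bi) = a*a + b*b + 0i

   so the imaginary part of the result is always zero and the real
   part needs no subtraction, which is why the fold builds
   COMPLEX_EXPR <a*a + b*b, 0>.  */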

/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
        elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
        if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
          return false;
        else
          elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}

/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
              && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
        need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
        CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }
  else
    return build_vector (type, &elts[2 * nelts]);
}
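
/* Worked example (illustrative, not part of GCC): for 4-element
   vectors ARG0 = {0, 1, 2, 3} and ARG1 = {4, 5, 6, 7}, the selector
   SEL = {0, 4, 1, 5} indexes into the 8-element concatenation of the
   two inputs, so the folded result is the VECTOR_CST {0, 4, 1, 5}
   (an interleave of the two low halves).  */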

/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
                                   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
           = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
          && INDIRECT_REF_P (base1)
          && (base_offset
              = fold_binary_loc (loc, MINUS_EXPR, type,
                                 fold_convert (type, TREE_OPERAND (base0, 0)),
                                 fold_convert (type,
                                               TREE_OPERAND (base1, 0)))))
      || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
                              base_offset,
                              fold_build2_loc (loc, MULT_EXPR, type,
                                               diff, esz));
    }
  return NULL_TREE;
}
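
/* Worked example (illustrative, not part of GCC): for int a[10],
   the address difference &a[7] - &a[2] folds to

     base_offset + (7 - 2) * sizeof (int)  =  0 + 5 * 4  =  20

   i.e. the byte distance between the two addresses; the bases are
   equal, so base_offset is zero and only the index difference scaled
   by the element size remains.  */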

/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
        return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
        {
          r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
          if (!exact_real_inverse (mode, &r))
            return NULL_TREE;
          elts[i] = build_real (unit_type, r);
        }

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}

/* Mask out the tz least significant bits of X of type TYPE where
   tz is the number of trailing zeroes in Y.  */
static wide_int
mask_with_tz (tree type, const wide_int &x, const wide_int &y)
{
  int tz = wi::ctz (y);
  if (tz > 0)
    return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
  return x;
}
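
/* Worked example (illustrative, not part of GCC): for y = 8
   (binary 1000, three trailing zeroes), mask_with_tz clears the
   low three bits of x, e.g. x = 0b10111 becomes 0b10000.  */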

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

static bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        if (!fndecl)
          return false;
        if (flag_delete_null_pointer_checks && !flag_check_new
            && DECL_IS_OPERATOR_NEW (fndecl)
            && !TREE_NOTHROW (fndecl))
          return true;
        if (flag_delete_null_pointer_checks
            && lookup_attribute ("returns_nonnull",
                                 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
          return true;
        return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}

/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

static bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
9066 /* Fold a binary expression of code CODE and type TYPE with operands
9067 OP0 and OP1. LOC is the location of the resulting expression.
9068 Return the folded expression if folding is successful. Otherwise,
9069 return NULL_TREE. */
9071 tree
9072 fold_binary_loc (location_t loc,
9073 enum tree_code code, tree type, tree op0, tree op1)
9075 enum tree_code_class kind = TREE_CODE_CLASS (code);
9076 tree arg0, arg1, tem;
9077 tree t1 = NULL_TREE;
9078 bool strict_overflow_p;
9079 unsigned int prec;
9081 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9082 && TREE_CODE_LENGTH (code) == 2
9083 && op0 != NULL_TREE
9084 && op1 != NULL_TREE);
9086 arg0 = op0;
9087 arg1 = op1;
9089 /* Strip any conversions that don't change the mode. This is
9090 safe for every expression, except for a comparison expression
9091 because its signedness is derived from its operands. So, in
9092 the latter case, only strip conversions that don't change the
9093 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9094 preserved.
9096 Note that this is done as an internal manipulation within the
9097 constant folder, in order to find the simplest representation
9098 of the arguments so that their form can be studied. In any case,
9099 cases, the appropriate type conversions should be put back in
9100 the tree that will get out of the constant folder. */
9102 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9104 STRIP_SIGN_NOPS (arg0);
9105 STRIP_SIGN_NOPS (arg1);
9107 else
9109 STRIP_NOPS (arg0);
9110 STRIP_NOPS (arg1);
9113 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9114 constant but we can't do arithmetic on them. */
9115 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9117 tem = const_binop (code, type, arg0, arg1);
9118 if (tem != NULL_TREE)
9120 if (TREE_TYPE (tem) != type)
9121 tem = fold_convert_loc (loc, type, tem);
9122 return tem;
9126 /* If this is a commutative operation, and ARG0 is a constant, move it
9127 to ARG1 to reduce the number of tests below. */
9128 if (commutative_tree_code (code)
9129 && tree_swap_operands_p (arg0, arg1, true))
9130 return fold_build2_loc (loc, code, type, op1, op0);
9132 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9133 to ARG1 to reduce the number of tests below. */
9134 if (kind == tcc_comparison
9135 && tree_swap_operands_p (arg0, arg1, true))
9136 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9138 tem = generic_simplify (loc, code, type, op0, op1);
9139 if (tem)
9140 return tem;
9142 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9144 First check for cases where an arithmetic operation is applied to a
9145 compound, conditional, or comparison operation. Push the arithmetic
9146 operation inside the compound or conditional to see if any folding
9147 can then be done. Convert comparison to conditional for this purpose.
9148 This also optimizes non-constant cases that used to be done in
9149 expand_expr.
9151 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9152 where one of the operands is a truth value and the other is a truth
9153 value or a BIT_AND_EXPR with the constant 1. In that case, the
9154 code below would make the expression more complex. Change it to a
9155 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9156 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9158 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9159 || code == EQ_EXPR || code == NE_EXPR)
9160 && TREE_CODE (type) != VECTOR_TYPE
9161 && ((truth_value_p (TREE_CODE (arg0))
9162 && (truth_value_p (TREE_CODE (arg1))
9163 || (TREE_CODE (arg1) == BIT_AND_EXPR
9164 && integer_onep (TREE_OPERAND (arg1, 1)))))
9165 || (truth_value_p (TREE_CODE (arg1))
9166 && (truth_value_p (TREE_CODE (arg0))
9167 || (TREE_CODE (arg0) == BIT_AND_EXPR
9168 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9170 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9171 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9172 : TRUTH_XOR_EXPR,
9173 boolean_type_node,
9174 fold_convert_loc (loc, boolean_type_node, arg0),
9175 fold_convert_loc (loc, boolean_type_node, arg1));
9177 if (code == EQ_EXPR)
9178 tem = invert_truthvalue_loc (loc, tem);
9180 return fold_convert_loc (loc, type, tem);
9183 if (TREE_CODE_CLASS (code) == tcc_binary
9184 || TREE_CODE_CLASS (code) == tcc_comparison)
9186 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9188 tem = fold_build2_loc (loc, code, type,
9189 fold_convert_loc (loc, TREE_TYPE (op0),
9190 TREE_OPERAND (arg0, 1)), op1);
9191 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9192 tem);
9194 if (TREE_CODE (arg1) == COMPOUND_EXPR
9195 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9197 tem = fold_build2_loc (loc, code, type, op0,
9198 fold_convert_loc (loc, TREE_TYPE (op1),
9199 TREE_OPERAND (arg1, 1)));
9200 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9201 tem);
9204 if (TREE_CODE (arg0) == COND_EXPR
9205 || TREE_CODE (arg0) == VEC_COND_EXPR
9206 || COMPARISON_CLASS_P (arg0))
9208 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9209 arg0, arg1,
9210 /*cond_first_p=*/1);
9211 if (tem != NULL_TREE)
9212 return tem;
9215 if (TREE_CODE (arg1) == COND_EXPR
9216 || TREE_CODE (arg1) == VEC_COND_EXPR
9217 || COMPARISON_CLASS_P (arg1))
9219 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9220 arg1, arg0,
9221 /*cond_first_p=*/0);
9222 if (tem != NULL_TREE)
9223 return tem;
9227 switch (code)
9229 case MEM_REF:
9230 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
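/* E.g. MEM[&MEM[p, 4], 8] collapses to MEM[p, 12]: the inner
   offset 4 and the outer offset 8 are simply added.  */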
9231 if (TREE_CODE (arg0) == ADDR_EXPR
9232 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9234 tree iref = TREE_OPERAND (arg0, 0);
9235 return fold_build2 (MEM_REF, type,
9236 TREE_OPERAND (iref, 0),
9237 int_const_binop (PLUS_EXPR, arg1,
9238 TREE_OPERAND (iref, 1)));
9241 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9242 if (TREE_CODE (arg0) == ADDR_EXPR
9243 && handled_component_p (TREE_OPERAND (arg0, 0)))
9245 tree base;
9246 HOST_WIDE_INT coffset;
9247 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9248 &coffset);
9249 if (!base)
9250 return NULL_TREE;
9251 return fold_build2 (MEM_REF, type,
9252 build_fold_addr_expr (base),
9253 int_const_binop (PLUS_EXPR, arg1,
9254 size_int (coffset)));
9257 return NULL_TREE;
9259 case POINTER_PLUS_EXPR:
9260 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9261 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9262 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9263 return fold_convert_loc (loc, type,
9264 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9265 fold_convert_loc (loc, sizetype,
9266 arg1),
9267 fold_convert_loc (loc, sizetype,
9268 arg0)));
9270 return NULL_TREE;
9272 case PLUS_EXPR:
9273 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9275 /* X + (X / CST) * -CST is X % CST. */
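/* For instance, in X + (X / 4) * -4 the constants cst0 == 4 and
   cst1 == -4 sum to zero, so the whole expression folds to X % 4:
   with truncating division, X - (X / 4) * 4 is exactly the
   remainder.  */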
9276 if (TREE_CODE (arg1) == MULT_EXPR
9277 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9278 && operand_equal_p (arg0,
9279 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9281 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9282 tree cst1 = TREE_OPERAND (arg1, 1);
9283 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9284 cst1, cst0);
9285 if (sum && integer_zerop (sum))
9286 return fold_convert_loc (loc, type,
9287 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9288 TREE_TYPE (arg0), arg0,
9289 cst0));
9293 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9294 one. Make sure the type is not saturating and has the signedness of
9295 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9296 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9297 if ((TREE_CODE (arg0) == MULT_EXPR
9298 || TREE_CODE (arg1) == MULT_EXPR)
9299 && !TYPE_SATURATING (type)
9300 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9301 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9302 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9304 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9305 if (tem)
9306 return tem;
9309 if (! FLOAT_TYPE_P (type))
9311 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9312 (plus (plus (mult) (mult)) (foo)) so that we can
9313 take advantage of the factoring cases below. */
9314 if (ANY_INTEGRAL_TYPE_P (type)
9315 && TYPE_OVERFLOW_WRAPS (type)
9316 && (((TREE_CODE (arg0) == PLUS_EXPR
9317 || TREE_CODE (arg0) == MINUS_EXPR)
9318 && TREE_CODE (arg1) == MULT_EXPR)
9319 || ((TREE_CODE (arg1) == PLUS_EXPR
9320 || TREE_CODE (arg1) == MINUS_EXPR)
9321 && TREE_CODE (arg0) == MULT_EXPR)))
9323 tree parg0, parg1, parg, marg;
9324 enum tree_code pcode;
9326 if (TREE_CODE (arg1) == MULT_EXPR)
9327 parg = arg0, marg = arg1;
9328 else
9329 parg = arg1, marg = arg0;
9330 pcode = TREE_CODE (parg);
9331 parg0 = TREE_OPERAND (parg, 0);
9332 parg1 = TREE_OPERAND (parg, 1);
9333 STRIP_NOPS (parg0);
9334 STRIP_NOPS (parg1);
9336 if (TREE_CODE (parg0) == MULT_EXPR
9337 && TREE_CODE (parg1) != MULT_EXPR)
9338 return fold_build2_loc (loc, pcode, type,
9339 fold_build2_loc (loc, PLUS_EXPR, type,
9340 fold_convert_loc (loc, type,
9341 parg0),
9342 fold_convert_loc (loc, type,
9343 marg)),
9344 fold_convert_loc (loc, type, parg1));
9345 if (TREE_CODE (parg0) != MULT_EXPR
9346 && TREE_CODE (parg1) == MULT_EXPR)
9347 return
9348 fold_build2_loc (loc, PLUS_EXPR, type,
9349 fold_convert_loc (loc, type, parg0),
9350 fold_build2_loc (loc, pcode, type,
9351 fold_convert_loc (loc, type, marg),
9352 fold_convert_loc (loc, type,
9353 parg1)));
9356 else
9358 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9359 to __complex__ ( x, y ). This is not the same for SNaNs or
9360 if signed zeros are involved. */
9361 if (!HONOR_SNANS (element_mode (arg0))
9362 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9363 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9365 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9366 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9367 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9368 bool arg0rz = false, arg0iz = false;
9369 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9370 || (arg0i && (arg0iz = real_zerop (arg0i))))
9372 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9373 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9374 if (arg0rz && arg1i && real_zerop (arg1i))
9376 tree rp = arg1r ? arg1r
9377 : build1 (REALPART_EXPR, rtype, arg1);
9378 tree ip = arg0i ? arg0i
9379 : build1 (IMAGPART_EXPR, rtype, arg0);
9380 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9382 else if (arg0iz && arg1r && real_zerop (arg1r))
9384 tree rp = arg0r ? arg0r
9385 : build1 (REALPART_EXPR, rtype, arg0);
9386 tree ip = arg1i ? arg1i
9387 : build1 (IMAGPART_EXPR, rtype, arg1);
9388 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9393 if (flag_unsafe_math_optimizations
9394 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9395 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9396 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9397 return tem;
9399 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9400 We associate floats only if the user has specified
9401 -fassociative-math. */
9402 if (flag_associative_math
9403 && TREE_CODE (arg1) == PLUS_EXPR
9404 && TREE_CODE (arg0) != MULT_EXPR)
9406 tree tree10 = TREE_OPERAND (arg1, 0);
9407 tree tree11 = TREE_OPERAND (arg1, 1);
9408 if (TREE_CODE (tree11) == MULT_EXPR
9409 && TREE_CODE (tree10) == MULT_EXPR)
9411 tree tree0;
9412 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9413 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9416 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9417 We associate floats only if the user has specified
9418 -fassociative-math. */
9419 if (flag_associative_math
9420 && TREE_CODE (arg0) == PLUS_EXPR
9421 && TREE_CODE (arg1) != MULT_EXPR)
9423 tree tree00 = TREE_OPERAND (arg0, 0);
9424 tree tree01 = TREE_OPERAND (arg0, 1);
9425 if (TREE_CODE (tree01) == MULT_EXPR
9426 && TREE_CODE (tree00) == MULT_EXPR)
9428 tree tree0;
9429 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9430 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9435 bit_rotate:
9436 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9437 is a rotate of A by C1 bits. */
9438 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9439 is a rotate of A by B bits. */
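/* Concretely, for a 32-bit unsigned A, (A << 3) + (A >> 29) matches
   the constant case (3 + 29 == 32) and becomes a left-rotate of A by
   3 bits; the variable form (A << B) + (A >> (32 - B)) is handled by
   the MINUS_EXPR cases below.  */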
9441 enum tree_code code0, code1;
9442 tree rtype;
9443 code0 = TREE_CODE (arg0);
9444 code1 = TREE_CODE (arg1);
9445 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9446 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9447 && operand_equal_p (TREE_OPERAND (arg0, 0),
9448 TREE_OPERAND (arg1, 0), 0)
9449 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9450 TYPE_UNSIGNED (rtype))
9451 /* Only create rotates in complete modes. Other cases are not
9452 expanded properly. */
9453 && (element_precision (rtype)
9454 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9456 tree tree01, tree11;
9457 enum tree_code code01, code11;
9459 tree01 = TREE_OPERAND (arg0, 1);
9460 tree11 = TREE_OPERAND (arg1, 1);
9461 STRIP_NOPS (tree01);
9462 STRIP_NOPS (tree11);
9463 code01 = TREE_CODE (tree01);
9464 code11 = TREE_CODE (tree11);
9465 if (code01 == INTEGER_CST
9466 && code11 == INTEGER_CST
9467 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9468 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9470 tem = build2_loc (loc, LROTATE_EXPR,
9471 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9472 TREE_OPERAND (arg0, 0),
9473 code0 == LSHIFT_EXPR
9474 ? TREE_OPERAND (arg0, 1)
9475 : TREE_OPERAND (arg1, 1));
9476 return fold_convert_loc (loc, type, tem);
9478 else if (code11 == MINUS_EXPR)
9480 tree tree110, tree111;
9481 tree110 = TREE_OPERAND (tree11, 0);
9482 tree111 = TREE_OPERAND (tree11, 1);
9483 STRIP_NOPS (tree110);
9484 STRIP_NOPS (tree111);
9485 if (TREE_CODE (tree110) == INTEGER_CST
9486 && 0 == compare_tree_int (tree110,
9487 element_precision
9488 (TREE_TYPE (TREE_OPERAND
9489 (arg0, 0))))
9490 && operand_equal_p (tree01, tree111, 0))
9491 return
9492 fold_convert_loc (loc, type,
9493 build2 ((code0 == LSHIFT_EXPR
9494 ? LROTATE_EXPR
9495 : RROTATE_EXPR),
9496 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9497 TREE_OPERAND (arg0, 0),
9498 TREE_OPERAND (arg0, 1)));
9500 else if (code01 == MINUS_EXPR)
9502 tree tree010, tree011;
9503 tree010 = TREE_OPERAND (tree01, 0);
9504 tree011 = TREE_OPERAND (tree01, 1);
9505 STRIP_NOPS (tree010);
9506 STRIP_NOPS (tree011);
9507 if (TREE_CODE (tree010) == INTEGER_CST
9508 && 0 == compare_tree_int (tree010,
9509 element_precision
9510 (TREE_TYPE (TREE_OPERAND
9511 (arg0, 0))))
9512 && operand_equal_p (tree11, tree011, 0))
9513 return fold_convert_loc
9514 (loc, type,
9515 build2 ((code0 != LSHIFT_EXPR
9516 ? LROTATE_EXPR
9517 : RROTATE_EXPR),
9518 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9519 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9524 associate:
9525 /* In most languages, we can't associate operations on floats through
9526 parentheses. Rather than remember where the parentheses were, we
9527 don't associate floats at all, unless the user has specified
9528 -fassociative-math.
9529 And, we need to make sure the type is not saturating. */
9531 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9532 && !TYPE_SATURATING (type))
9534 tree var0, con0, lit0, minus_lit0;
9535 tree var1, con1, lit1, minus_lit1;
9536 tree atype = type;
9537 bool ok = true;
9539 /* Split both trees into variables, constants, and literals. Then
9540 associate each group together, the constants with literals,
9541 then the result with variables. This increases the chances of
9542 literals being recombined later and of generating relocatable
9543 expressions for the sum of a constant and literal. */
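/* E.g. (x + 3) + (y + 5) splits into the variables x, y and the
   literals 3, 5; the literals combine to 8 and the result is
   rebuilt as (x + y) + 8, keeping the constant part in one place
   where later folds can reach it.  */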
9544 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9545 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9546 code == MINUS_EXPR);
9548 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9549 if (code == MINUS_EXPR)
9550 code = PLUS_EXPR;
9552 /* With undefined overflow prefer doing association in a type
9553 which wraps on overflow, if that is one of the operand types. */
9554 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9555 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9557 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9558 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9559 atype = TREE_TYPE (arg0);
9560 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9561 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9562 atype = TREE_TYPE (arg1);
9563 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9566 /* With undefined overflow we can only associate constants with one
9567 variable, and constants whose association doesn't overflow. */
9568 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9569 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9571 if (var0 && var1)
9573 tree tmp0 = var0;
9574 tree tmp1 = var1;
9575 bool one_neg = false;
9577 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9579 tmp0 = TREE_OPERAND (tmp0, 0);
9580 one_neg = !one_neg;
9582 if (CONVERT_EXPR_P (tmp0)
9583 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9584 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9585 <= TYPE_PRECISION (atype)))
9586 tmp0 = TREE_OPERAND (tmp0, 0);
9587 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9589 tmp1 = TREE_OPERAND (tmp1, 0);
9590 one_neg = !one_neg;
9592 if (CONVERT_EXPR_P (tmp1)
9593 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9594 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9595 <= TYPE_PRECISION (atype)))
9596 tmp1 = TREE_OPERAND (tmp1, 0);
9597 /* The only case we can still associate with two variables
9598 is if they cancel out. */
9599 if (!one_neg
9600 || !operand_equal_p (tmp0, tmp1, 0))
9601 ok = false;
9605 /* Only do something if we found more than two objects. Otherwise,
9606 nothing has changed and we risk infinite recursion. */
9607 if (ok
9608 && (2 < ((var0 != 0) + (var1 != 0)
9609 + (con0 != 0) + (con1 != 0)
9610 + (lit0 != 0) + (lit1 != 0)
9611 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9613 bool any_overflows = false;
9614 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9615 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9616 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9617 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9618 var0 = associate_trees (loc, var0, var1, code, atype);
9619 con0 = associate_trees (loc, con0, con1, code, atype);
9620 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9621 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9622 code, atype);
9624 /* Preserve the MINUS_EXPR if the negative part of the literal is
9625 greater than the positive part. Otherwise, the multiplicative
9626 folding code (i.e. extract_muldiv) may be fooled when
9627 unsigned constants are subtracted, as in the following
9628 example: ((X*2 + 4) - 8U)/2. */
9629 if (minus_lit0 && lit0)
9631 if (TREE_CODE (lit0) == INTEGER_CST
9632 && TREE_CODE (minus_lit0) == INTEGER_CST
9633 && tree_int_cst_lt (lit0, minus_lit0))
9635 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9636 MINUS_EXPR, atype);
9637 lit0 = 0;
9639 else
9641 lit0 = associate_trees (loc, lit0, minus_lit0,
9642 MINUS_EXPR, atype);
9643 minus_lit0 = 0;
9647 /* Don't introduce overflows through reassociation. */
9648 if (!any_overflows
9649 && ((lit0 && TREE_OVERFLOW_P (lit0))
9650 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9651 return NULL_TREE;
9653 if (minus_lit0)
9655 if (con0 == 0)
9656 return
9657 fold_convert_loc (loc, type,
9658 associate_trees (loc, var0, minus_lit0,
9659 MINUS_EXPR, atype));
9660 else
9662 con0 = associate_trees (loc, con0, minus_lit0,
9663 MINUS_EXPR, atype);
9664 return
9665 fold_convert_loc (loc, type,
9666 associate_trees (loc, var0, con0,
9667 PLUS_EXPR, atype));
9671 con0 = associate_trees (loc, con0, lit0, code, atype);
9672 return
9673 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9674 code, atype));
9678 return NULL_TREE;
9680 case MINUS_EXPR:
9681 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9682 if (TREE_CODE (arg0) == NEGATE_EXPR
9683 && negate_expr_p (op1)
9684 && reorder_operands_p (arg0, arg1))
9685 return fold_build2_loc (loc, MINUS_EXPR, type,
9686 negate_expr (op1),
9687 fold_convert_loc (loc, type,
9688 TREE_OPERAND (arg0, 0)));
9690 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9691 __complex__ ( x, -y ). This is not the same for SNaNs or if
9692 signed zeros are involved. */
9693 if (!HONOR_SNANS (element_mode (arg0))
9694 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9695 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9697 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9698 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9699 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9700 bool arg0rz = false, arg0iz = false;
9701 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9702 || (arg0i && (arg0iz = real_zerop (arg0i))))
9704 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9705 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9706 if (arg0rz && arg1i && real_zerop (arg1i))
9708 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9709 arg1r ? arg1r
9710 : build1 (REALPART_EXPR, rtype, arg1));
9711 tree ip = arg0i ? arg0i
9712 : build1 (IMAGPART_EXPR, rtype, arg0);
9713 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9715 else if (arg0iz && arg1r && real_zerop (arg1r))
9717 tree rp = arg0r ? arg0r
9718 : build1 (REALPART_EXPR, rtype, arg0);
9719 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9720 arg1i ? arg1i
9721 : build1 (IMAGPART_EXPR, rtype, arg1));
9722 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9727 /* A - B -> A + (-B) if B is easily negatable. */
9728 if (negate_expr_p (op1)
9729 && ! TYPE_OVERFLOW_SANITIZED (type)
9730 && ((FLOAT_TYPE_P (type)
9731 /* Avoid this transformation if B is a positive REAL_CST. */
9732 && (TREE_CODE (op1) != REAL_CST
9733 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9734 || INTEGRAL_TYPE_P (type)))
9735 return fold_build2_loc (loc, PLUS_EXPR, type,
9736 fold_convert_loc (loc, type, arg0),
9737 negate_expr (op1));
9739 /* Fold &a[i] - &a[j] to i-j. */
9740 if (TREE_CODE (arg0) == ADDR_EXPR
9741 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9742 && TREE_CODE (arg1) == ADDR_EXPR
9743 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9745 tree tem = fold_addr_of_array_ref_difference (loc, type,
9746 TREE_OPERAND (arg0, 0),
9747 TREE_OPERAND (arg1, 0));
9748 if (tem)
9749 return tem;
9752 if (FLOAT_TYPE_P (type)
9753 && flag_unsafe_math_optimizations
9754 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9755 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9756 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9757 return tem;
9759 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9760 one. Make sure the type is not saturating and has the signedness of
9761 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9762 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9763 if ((TREE_CODE (arg0) == MULT_EXPR
9764 || TREE_CODE (arg1) == MULT_EXPR)
9765 && !TYPE_SATURATING (type)
9766 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9767 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9768 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9770 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9771 if (tem)
9772 return tem;
9775 goto associate;
9777 case MULT_EXPR:
9778 if (! FLOAT_TYPE_P (type))
9780 /* Transform x * -C into -x * C if x is easily negatable. */
9781 if (TREE_CODE (op1) == INTEGER_CST
9782 && tree_int_cst_sgn (op1) == -1
9783 && negate_expr_p (op0)
9784 && (tem = negate_expr (op1)) != op1
9785 && ! TREE_OVERFLOW (tem))
9786 return fold_build2_loc (loc, MULT_EXPR, type,
9787 fold_convert_loc (loc, type,
9788 negate_expr (op0)), tem);
9790 /* (A + A) * C -> A * 2 * C */
9791 if (TREE_CODE (arg0) == PLUS_EXPR
9792 && TREE_CODE (arg1) == INTEGER_CST
9793 && operand_equal_p (TREE_OPERAND (arg0, 0),
9794 TREE_OPERAND (arg0, 1), 0))
9795 return fold_build2_loc (loc, MULT_EXPR, type,
9796 omit_one_operand_loc (loc, type,
9797 TREE_OPERAND (arg0, 0),
9798 TREE_OPERAND (arg0, 1)),
9799 fold_build2_loc (loc, MULT_EXPR, type,
9800 build_int_cst (type, 2) , arg1));
9802 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9803 sign-changing only. */
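/* E.g. (unsigned) (x /[ex] 4) * 4 folds to (unsigned) x: the exact
   division discarded no bits, so multiplying back recovers x, and a
   sign-changing conversion in between does not disturb that.  */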
9804 if (TREE_CODE (arg1) == INTEGER_CST
9805 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9806 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9807 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9809 strict_overflow_p = false;
9810 if (TREE_CODE (arg1) == INTEGER_CST
9811 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9812 &strict_overflow_p)))
9814 if (strict_overflow_p)
9815 fold_overflow_warning (("assuming signed overflow does not "
9816 "occur when simplifying "
9817 "multiplication"),
9818 WARN_STRICT_OVERFLOW_MISC);
9819 return fold_convert_loc (loc, type, tem);
9822 /* Optimize z * conj(z) for integer complex numbers. */
9823 if (TREE_CODE (arg0) == CONJ_EXPR
9824 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9825 return fold_mult_zconjz (loc, type, arg1);
9826 if (TREE_CODE (arg1) == CONJ_EXPR
9827 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9828 return fold_mult_zconjz (loc, type, arg0);
9830 else
9832 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9833 This is not the same for NaNs or if signed zeros are
9834 involved. */
9835 if (!HONOR_NANS (arg0)
9836 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9837 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9838 && TREE_CODE (arg1) == COMPLEX_CST
9839 && real_zerop (TREE_REALPART (arg1)))
9841 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9842 if (real_onep (TREE_IMAGPART (arg1)))
9843 return
9844 fold_build2_loc (loc, COMPLEX_EXPR, type,
9845 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9846 rtype, arg0)),
9847 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9848 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9849 return
9850 fold_build2_loc (loc, COMPLEX_EXPR, type,
9851 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9852 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9853 rtype, arg0)));
9856 /* Optimize z * conj(z) for floating point complex numbers.
9857 Guarded by flag_unsafe_math_optimizations as non-finite
9858 imaginary components don't produce scalar results. */
9859 if (flag_unsafe_math_optimizations
9860 && TREE_CODE (arg0) == CONJ_EXPR
9861 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9862 return fold_mult_zconjz (loc, type, arg1);
9863 if (flag_unsafe_math_optimizations
9864 && TREE_CODE (arg1) == CONJ_EXPR
9865 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9866 return fold_mult_zconjz (loc, type, arg0);
9868 if (flag_unsafe_math_optimizations)
9871 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
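/* E.g. a GENERIC d * d, with d a double, becomes pow (d, 2.0)
   here; the expander turns that back into a single multiplication,
   so the canonical form costs nothing at run time and mainly helps
   later pattern matching.  */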
9872 if (!in_gimple_form
9873 && optimize
9874 && operand_equal_p (arg0, arg1, 0))
9876 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9878 if (powfn)
9880 tree arg = build_real (type, dconst2);
9881 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
9886 goto associate;
9888 case BIT_IOR_EXPR:
9889 /* Canonicalize (X & C1) | C2. */
9890 if (TREE_CODE (arg0) == BIT_AND_EXPR
9891 && TREE_CODE (arg1) == INTEGER_CST
9892 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9894 int width = TYPE_PRECISION (type), w;
9895 wide_int c1 = TREE_OPERAND (arg0, 1);
9896 wide_int c2 = arg1;
9898 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9899 if ((c1 & c2) == c1)
9900 return omit_one_operand_loc (loc, type, arg1,
9901 TREE_OPERAND (arg0, 0));
9903 wide_int msk = wi::mask (width, false,
9904 TYPE_PRECISION (TREE_TYPE (arg1)));
9906 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9907 if (msk.and_not (c1 | c2) == 0)
9908 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9909 TREE_OPERAND (arg0, 0), arg1);
9911 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9912 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9913 mode which allows further optimizations. */
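/* Worked instance in an 8-bit type: (X & 0x33) | 0x03 reduces C1
   to C1 & ~C2 == 0x30, giving (X & 0x30) | 0x03; by contrast
   (X & 0xF0) | 0x0F is handled just above, since C1 | C2 == ~0
   lets the AND be dropped entirely.  */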
9914 c1 &= msk;
9915 c2 &= msk;
9916 wide_int c3 = c1.and_not (c2);
9917 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9919 wide_int mask = wi::mask (w, false,
9920 TYPE_PRECISION (type));
9921 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9923 c3 = mask;
9924 break;
9928 if (c3 != c1)
9929 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9930 fold_build2_loc (loc, BIT_AND_EXPR, type,
9931 TREE_OPERAND (arg0, 0),
9932 wide_int_to_tree (type,
9933 c3)),
9934 arg1);
9937 /* See if this can be simplified into a rotate first. If that
9938 is unsuccessful, continue in the association code. */
9939 goto bit_rotate;
9941 case BIT_XOR_EXPR:
9942 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9943 if (TREE_CODE (arg0) == BIT_AND_EXPR
9944 && INTEGRAL_TYPE_P (type)
9945 && integer_onep (TREE_OPERAND (arg0, 1))
9946 && integer_onep (arg1))
9947 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9948 build_zero_cst (TREE_TYPE (arg0)));
9950 /* See if this can be simplified into a rotate first. If that
9951 is unsuccessful, continue in the association code. */
9952 goto bit_rotate;
9954 case BIT_AND_EXPR:
9955 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9956 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9957 && INTEGRAL_TYPE_P (type)
9958 && integer_onep (TREE_OPERAND (arg0, 1))
9959 && integer_onep (arg1))
9961 tree tem2;
9962 tem = TREE_OPERAND (arg0, 0);
9963 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9964 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9965 tem, tem2);
9966 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9967 build_zero_cst (TREE_TYPE (tem)));
9969 /* Fold ~X & 1 as (X & 1) == 0. */
9970 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9971 && INTEGRAL_TYPE_P (type)
9972 && integer_onep (arg1))
9974 tree tem2;
9975 tem = TREE_OPERAND (arg0, 0);
9976 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9977 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9978 tem, tem2);
9979 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9980 build_zero_cst (TREE_TYPE (tem)));
9982 /* Fold !X & 1 as X == 0. */
9983 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9984 && integer_onep (arg1))
9986 tem = TREE_OPERAND (arg0, 0);
9987 return fold_build2_loc (loc, EQ_EXPR, type, tem,
9988 build_zero_cst (TREE_TYPE (tem)));
9991 /* Fold (X ^ Y) & Y as ~X & Y. */
9992 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9993 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9995 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9996 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9997 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
9998 fold_convert_loc (loc, type, arg1));
10000 /* Fold (X ^ Y) & X as ~Y & X. */
10001 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10002 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10003 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10005 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10006 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10007 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10008 fold_convert_loc (loc, type, arg1));
10010 /* Fold X & (X ^ Y) as X & ~Y. */
10011 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10012 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10014 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10015 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10016 fold_convert_loc (loc, type, arg0),
10017 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10019 /* Fold X & (Y ^ X) as ~Y & X. */
10020 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10021 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10022 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10024 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10025 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10026 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10027 fold_convert_loc (loc, type, arg0));
10030 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10031 multiple of 1 << CST. */
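/* E.g. (X * 8) & -8 folds to X * 8: -8 has three trailing zero
   bits and X * 8 is always a multiple of 8, so the mask is a
   no-op.  */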
10032 if (TREE_CODE (arg1) == INTEGER_CST)
10034 wide_int cst1 = arg1;
10035 wide_int ncst1 = -cst1;
10036 if ((cst1 & ncst1) == ncst1
10037 && multiple_of_p (type, arg0,
10038 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10039 return fold_convert_loc (loc, type, arg0);
10042 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10043 bits from CST2. */
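/* E.g. (X * 4) & 3 is always 0, while (X * 4) & 7 drops the two
   known-zero low bits from the mask and becomes (X * 4) & 4.  */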
10044 if (TREE_CODE (arg1) == INTEGER_CST
10045 && TREE_CODE (arg0) == MULT_EXPR
10046 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10048 wide_int warg1 = arg1;
10049 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10051 if (masked == 0)
10052 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10053 arg0, arg1);
10054 else if (masked != warg1)
10056 /* Avoid the transform if arg1 is a mask of some
10057 mode which allows further optimizations. */
10058 int pop = wi::popcount (warg1);
10059 if (!(pop >= BITS_PER_UNIT
10060 && exact_log2 (pop) != -1
10061 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10062 return fold_build2_loc (loc, code, type, op0,
10063 wide_int_to_tree (type, masked));
10067 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10068 ((A & N) + B) & M -> (A + B) & M
10069 Similarly if (N & M) == 0,
10070 ((A | N) + B) & M -> (A + B) & M
10071 and for - instead of + (or unary - instead of +)
10072 and/or ^ instead of |.
10073 If B is constant and (B & M) == 0, fold into A & M. */
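/* E.g. with M == 7 (so M == (1 << 3) - 1): ((A & 7) + B) & 7
   -> (A + B) & 7 since 7 & 7 == 7, and ((A ^ 8) + B) & 7
   -> (A + B) & 7 since 8 & 7 == 0; a constant B with B & 7 == 0
   drops out of the sum entirely.  */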
10074 if (TREE_CODE (arg1) == INTEGER_CST)
10076 wide_int cst1 = arg1;
10077 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10078 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10079 && (TREE_CODE (arg0) == PLUS_EXPR
10080 || TREE_CODE (arg0) == MINUS_EXPR
10081 || TREE_CODE (arg0) == NEGATE_EXPR)
10082 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10083 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10085 tree pmop[2];
10086 int which = 0;
10087 wide_int cst0;
10089 /* Now we know that arg0 is (C + D) or (C - D) or
10090 -C and arg1 (M) is == (1LL << cst) - 1.
10091 Store C into PMOP[0] and D into PMOP[1]. */
10092 pmop[0] = TREE_OPERAND (arg0, 0);
10093 pmop[1] = NULL;
10094 if (TREE_CODE (arg0) != NEGATE_EXPR)
10096 pmop[1] = TREE_OPERAND (arg0, 1);
10097 which = 1;
10100 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10101 which = -1;
10103 for (; which >= 0; which--)
10104 switch (TREE_CODE (pmop[which]))
10106 case BIT_AND_EXPR:
10107 case BIT_IOR_EXPR:
10108 case BIT_XOR_EXPR:
10109 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10110 != INTEGER_CST)
10111 break;
10112 cst0 = TREE_OPERAND (pmop[which], 1);
10113 cst0 &= cst1;
10114 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10116 if (cst0 != cst1)
10117 break;
10119 else if (cst0 != 0)
10120 break;
10121 /* If C or D is of the form (A & N) where
10122 (N & M) == M, or of the form (A | N) or
10123 (A ^ N) where (N & M) == 0, replace it with A. */
10124 pmop[which] = TREE_OPERAND (pmop[which], 0);
10125 break;
10126 case INTEGER_CST:
10127 /* If C or D is a N where (N & M) == 0, it can be
10128 omitted (assumed 0). */
10129 if ((TREE_CODE (arg0) == PLUS_EXPR
10130 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10131 && (cst1 & pmop[which]) == 0)
10132 pmop[which] = NULL;
10133 break;
10134 default:
10135 break;
10138 /* Only build anything new if we optimized one or both arguments
10139 above. */
10140 if (pmop[0] != TREE_OPERAND (arg0, 0)
10141 || (TREE_CODE (arg0) != NEGATE_EXPR
10142 && pmop[1] != TREE_OPERAND (arg0, 1)))
10144 tree utype = TREE_TYPE (arg0);
10145 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10147 /* Perform the operations in a type that has defined
10148 overflow behavior. */
10149 utype = unsigned_type_for (TREE_TYPE (arg0));
10150 if (pmop[0] != NULL)
10151 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10152 if (pmop[1] != NULL)
10153 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10156 if (TREE_CODE (arg0) == NEGATE_EXPR)
10157 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10158 else if (TREE_CODE (arg0) == PLUS_EXPR)
10160 if (pmop[0] != NULL && pmop[1] != NULL)
10161 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10162 pmop[0], pmop[1]);
10163 else if (pmop[0] != NULL)
10164 tem = pmop[0];
10165 else if (pmop[1] != NULL)
10166 tem = pmop[1];
10167 else
10168 return build_int_cst (type, 0);
10170 else if (pmop[0] == NULL)
10171 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10172 else
10173 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10174 pmop[0], pmop[1]);
10175 /* TEM is now the new binary +, - or unary - replacement. */
10176 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10177 fold_convert_loc (loc, utype, arg1));
10178 return fold_convert_loc (loc, type, tem);
10183 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10184 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10185 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10187 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10189 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10190 if (mask == -1)
10191 return
10192 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10195 goto associate;
10197 case RDIV_EXPR:
10198 /* Don't touch a floating-point divide by zero unless the mode
10199 of the constant can represent infinity. */
10200 if (TREE_CODE (arg1) == REAL_CST
10201 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10202 && real_zerop (arg1))
10203 return NULL_TREE;
10205 /* (-A) / (-B) -> A / B */
10206 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10207 return fold_build2_loc (loc, RDIV_EXPR, type,
10208 TREE_OPERAND (arg0, 0),
10209 negate_expr (arg1));
10210 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10211 return fold_build2_loc (loc, RDIV_EXPR, type,
10212 negate_expr (arg0),
10213 TREE_OPERAND (arg1, 0));
10214 return NULL_TREE;
10216 case TRUNC_DIV_EXPR:
10217 /* Fall through */
10219 case FLOOR_DIV_EXPR:
10220 /* Simplify A / (B << N) where A and B are positive and B is
10221 a power of 2, to A >> (N + log2(B)). */
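/* E.g. for unsigned A, A / (4 << N) becomes A >> (N + 2): the
   divisor is 2**(N+2), so the division is a single right shift.  */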
10222 strict_overflow_p = false;
10223 if (TREE_CODE (arg1) == LSHIFT_EXPR
10224 && (TYPE_UNSIGNED (type)
10225 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10227 tree sval = TREE_OPERAND (arg1, 0);
10228 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10230 tree sh_cnt = TREE_OPERAND (arg1, 1);
10231 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10232 wi::exact_log2 (sval));
10234 if (strict_overflow_p)
10235 fold_overflow_warning (("assuming signed overflow does not "
10236 "occur when simplifying A / (B << N)"),
10237 WARN_STRICT_OVERFLOW_MISC);
10239 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10240 sh_cnt, pow2);
10241 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10242 fold_convert_loc (loc, type, arg0), sh_cnt);
10246 /* Fall through */
10248 case ROUND_DIV_EXPR:
10249 case CEIL_DIV_EXPR:
10250 case EXACT_DIV_EXPR:
10251 if (integer_zerop (arg1))
10252 return NULL_TREE;
10254 /* Convert -A / -B to A / B when the type is signed and overflow is
10255 undefined. */
10256 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10257 && TREE_CODE (arg0) == NEGATE_EXPR
10258 && negate_expr_p (op1))
10260 if (INTEGRAL_TYPE_P (type))
10261 fold_overflow_warning (("assuming signed overflow does not occur "
10262 "when distributing negation across "
10263 "division"),
10264 WARN_STRICT_OVERFLOW_MISC);
10265 return fold_build2_loc (loc, code, type,
10266 fold_convert_loc (loc, type,
10267 TREE_OPERAND (arg0, 0)),
10268 negate_expr (op1));
10270 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10271 && TREE_CODE (arg1) == NEGATE_EXPR
10272 && negate_expr_p (op0))
10274 if (INTEGRAL_TYPE_P (type))
10275 fold_overflow_warning (("assuming signed overflow does not occur "
10276 "when distributing negation across "
10277 "division"),
10278 WARN_STRICT_OVERFLOW_MISC);
10279 return fold_build2_loc (loc, code, type,
10280 negate_expr (op0),
10281 fold_convert_loc (loc, type,
10282 TREE_OPERAND (arg1, 0)));
10285 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10286 operation, EXACT_DIV_EXPR.
10288 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10289 At one time others generated faster code; it's not clear if they do
10290 after the last round of changes to the DIV code in expmed.c. */
10291 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10292 && multiple_of_p (type, arg0, arg1))
10293 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10294 fold_convert (type, arg0),
10295 fold_convert (type, arg1));
10297 strict_overflow_p = false;
10298 if (TREE_CODE (arg1) == INTEGER_CST
10299 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10300 &strict_overflow_p)))
10302 if (strict_overflow_p)
10303 fold_overflow_warning (("assuming signed overflow does not occur "
10304 "when simplifying division"),
10305 WARN_STRICT_OVERFLOW_MISC);
10306 return fold_convert_loc (loc, type, tem);
10309 return NULL_TREE;
10311 case CEIL_MOD_EXPR:
10312 case FLOOR_MOD_EXPR:
10313 case ROUND_MOD_EXPR:
10314 case TRUNC_MOD_EXPR:
10315 strict_overflow_p = false;
10316 if (TREE_CODE (arg1) == INTEGER_CST
10317 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10318 &strict_overflow_p)))
10320 if (strict_overflow_p)
10321 fold_overflow_warning (("assuming signed overflow does not occur "
10322 "when simplifying modulus"),
10323 WARN_STRICT_OVERFLOW_MISC);
10324 return fold_convert_loc (loc, type, tem);
10327 return NULL_TREE;
10329 case LROTATE_EXPR:
10330 case RROTATE_EXPR:
10331 case RSHIFT_EXPR:
10332 case LSHIFT_EXPR:
10333 /* Since a negative shift count is not well-defined,
10334 don't try to compute it in the compiler. */
10335 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10336 return NULL_TREE;
10338 prec = element_precision (type);
10340 /* If we have a rotate of a bit operation with the rotate count and
10341 the second operand of the bit operation both constant,
10342 permute the two operations. */
10343 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10344 && (TREE_CODE (arg0) == BIT_AND_EXPR
10345 || TREE_CODE (arg0) == BIT_IOR_EXPR
10346 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10347 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10348 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10349 fold_build2_loc (loc, code, type,
10350 TREE_OPERAND (arg0, 0), arg1),
10351 fold_build2_loc (loc, code, type,
10352 TREE_OPERAND (arg0, 1), arg1));
10354 /* Two consecutive rotates adding up to some integer
10355 multiple of the precision of the type can be ignored. */
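/* E.g. on a 32-bit type, (X rrotate 8) rrotate 24 rotates by 32
   bits in total, i.e. not at all, and folds back to X.  */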
10356 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10357 && TREE_CODE (arg0) == RROTATE_EXPR
10358 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10359 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10360 prec) == 0)
10361 return TREE_OPERAND (arg0, 0);
10363 return NULL_TREE;
10365 case MIN_EXPR:
10366 case MAX_EXPR:
10367 goto associate;
10369 case TRUTH_ANDIF_EXPR:
10370 /* Note that the operands of this must be ints
10371 and their values must be 0 or 1.
10372 ("true" is a fixed value perhaps depending on the language.) */
10373 /* If first arg is constant zero, return it. */
10374 if (integer_zerop (arg0))
10375 return fold_convert_loc (loc, type, arg0);
10376 case TRUTH_AND_EXPR:
10377 /* If either arg is constant true, drop it. */
10378 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10379 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10380 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10381 /* Preserve sequence points. */
10382 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10383 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10384 /* If second arg is constant zero, result is zero, but first arg
10385 must be evaluated. */
10386 if (integer_zerop (arg1))
10387 return omit_one_operand_loc (loc, type, arg1, arg0);
10388 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10389 case will be handled here. */
10390 if (integer_zerop (arg0))
10391 return omit_one_operand_loc (loc, type, arg0, arg1);
10393 /* !X && X is always false. */
10394 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10395 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10396 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10397 /* X && !X is always false. */
10398 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10399 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10400 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10402 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10403 means A >= Y && A != MAX, but in this case we know that
10404 A < X <= MAX. */
10406 if (!TREE_SIDE_EFFECTS (arg0)
10407 && !TREE_SIDE_EFFECTS (arg1))
10409 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10410 if (tem && !operand_equal_p (tem, arg0, 0))
10411 return fold_build2_loc (loc, code, type, tem, arg1);
10413 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10414 if (tem && !operand_equal_p (tem, arg1, 0))
10415 return fold_build2_loc (loc, code, type, arg0, tem);
10418 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10419 != NULL_TREE)
10420 return tem;
10422 return NULL_TREE;
10424 case TRUTH_ORIF_EXPR:
10425 /* Note that the operands of this must be ints
10426 and their values must be 0 or true.
10427 ("true" is a fixed value perhaps depending on the language.) */
10428 /* If first arg is constant true, return it. */
10429 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10430 return fold_convert_loc (loc, type, arg0);
10431 case TRUTH_OR_EXPR:
10432 /* If either arg is constant zero, drop it. */
10433 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10434 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10435 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10436 /* Preserve sequence points. */
10437 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10438 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10439 /* If second arg is constant true, result is true, but we must
10440 evaluate first arg. */
10441 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10442 return omit_one_operand_loc (loc, type, arg1, arg0);
10443 /* Likewise for first arg, but note this only occurs here for
10444 TRUTH_OR_EXPR. */
10445 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10446 return omit_one_operand_loc (loc, type, arg0, arg1);
10448 /* !X || X is always true. */
10449 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10450 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10451 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10452 /* X || !X is always true. */
10453 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10454 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10455 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10457 /* (X && !Y) || (!X && Y) is X ^ Y */
10458 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10459 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10461 tree a0, a1, l0, l1, n0, n1;
10463 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10464 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10466 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10467 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10469 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10470 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10472 if ((operand_equal_p (n0, a0, 0)
10473 && operand_equal_p (n1, a1, 0))
10474 || (operand_equal_p (n0, a1, 0)
10475 && operand_equal_p (n1, a0, 0)))
10476 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10479 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10480 != NULL_TREE)
10481 return tem;
10483 return NULL_TREE;
10485 case TRUTH_XOR_EXPR:
10486 /* If the second arg is constant zero, drop it. */
10487 if (integer_zerop (arg1))
10488 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10489 /* If the second arg is constant true, this is a logical inversion. */
10490 if (integer_onep (arg1))
10492 tem = invert_truthvalue_loc (loc, arg0);
10493 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10495 /* Identical arguments cancel to zero. */
10496 if (operand_equal_p (arg0, arg1, 0))
10497 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10499 /* !X ^ X is always true. */
10500 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10501 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10502 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10504 /* X ^ !X is always true. */
10505 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10506 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10507 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10509 return NULL_TREE;
10511 case EQ_EXPR:
10512 case NE_EXPR:
10513 STRIP_NOPS (arg0);
10514 STRIP_NOPS (arg1);
10516 tem = fold_comparison (loc, code, type, op0, op1);
10517 if (tem != NULL_TREE)
10518 return tem;
10520 /* bool_var != 1 becomes !bool_var. */
10521 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10522 && code == NE_EXPR)
10523 return fold_convert_loc (loc, type,
10524 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10525 TREE_TYPE (arg0), arg0));
10527 /* bool_var == 0 becomes !bool_var. */
10528 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10529 && code == EQ_EXPR)
10530 return fold_convert_loc (loc, type,
10531 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10532 TREE_TYPE (arg0), arg0));
10534 /* !exp != 0 becomes !exp */
10535 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10536 && code == NE_EXPR)
10537 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10539 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10540 if ((TREE_CODE (arg0) == PLUS_EXPR
10541 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10542 || TREE_CODE (arg0) == MINUS_EXPR)
10543 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10544 0)),
10545 arg1, 0)
10546 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10547 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10549 tree val = TREE_OPERAND (arg0, 1);
10550 return omit_two_operands_loc (loc, type,
10551 fold_build2_loc (loc, code, type,
10552 val,
10553 build_int_cst (TREE_TYPE (val),
10554 0)),
10555 TREE_OPERAND (arg0, 0), arg1);
10558 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
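/* E.g. (5 - X) == X would need 2*X == 5; since 2*X is even in any
   integral type (even with wraparound), the EQ folds to false and
   the NE to true, while X is still evaluated for side effects.  */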
10559 if (TREE_CODE (arg0) == MINUS_EXPR
10560 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10561 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10562 1)),
10563 arg1, 0)
10564 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10566 return omit_two_operands_loc (loc, type,
10567 code == NE_EXPR
10568 ? boolean_true_node : boolean_false_node,
10569 TREE_OPERAND (arg0, 1), arg1);
10572 /* If this is an EQ or NE comparison with zero and ARG0 is
10573 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10574 two operations, but the latter can be done in one less insn
10575 on machines that have only two-operand insns or on which a
10576 constant cannot be the first operand. */
10577 if (TREE_CODE (arg0) == BIT_AND_EXPR
10578 && integer_zerop (arg1))
10580 tree arg00 = TREE_OPERAND (arg0, 0);
10581 tree arg01 = TREE_OPERAND (arg0, 1);
10582 if (TREE_CODE (arg00) == LSHIFT_EXPR
10583 && integer_onep (TREE_OPERAND (arg00, 0)))
10585 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10586 arg01, TREE_OPERAND (arg00, 1));
10587 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10588 build_int_cst (TREE_TYPE (arg0), 1));
10589 return fold_build2_loc (loc, code, type,
10590 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10591 arg1);
10593 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10594 && integer_onep (TREE_OPERAND (arg01, 0)))
10596 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10597 arg00, TREE_OPERAND (arg01, 1));
10598 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10599 build_int_cst (TREE_TYPE (arg0), 1));
10600 return fold_build2_loc (loc, code, type,
10601 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10602 arg1);
10606 /* If this is an NE or EQ comparison of zero against the result of a
10607 signed MOD operation whose second operand is a power of 2, make
10608 the MOD operation unsigned since it is simpler and equivalent. */
10609 if (integer_zerop (arg1)
10610 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10611 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10612 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10613 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10614 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10615 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10617 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10618 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10619 fold_convert_loc (loc, newtype,
10620 TREE_OPERAND (arg0, 0)),
10621 fold_convert_loc (loc, newtype,
10622 TREE_OPERAND (arg0, 1)));
10624 return fold_build2_loc (loc, code, type, newmod,
10625 fold_convert_loc (loc, newtype, arg1));
10628 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10629 C1 is a valid shift constant, and C2 is a power of two, i.e.
10630 a single bit. */
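/* E.g. ((X >> 2) & 4) != 0 becomes (X & 16) != 0, since 4 << 2
   still fits in the type; when C2 << C1 would overflow, a signed
   X reduces to a plain sign test, X < 0.  */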
10631 if (TREE_CODE (arg0) == BIT_AND_EXPR
10632 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10633 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10634 == INTEGER_CST
10635 && integer_pow2p (TREE_OPERAND (arg0, 1))
10636 && integer_zerop (arg1))
10638 tree itype = TREE_TYPE (arg0);
10639 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10640 prec = TYPE_PRECISION (itype);
10642 /* Check for a valid shift count. */
10643 if (wi::ltu_p (arg001, prec))
10645 tree arg01 = TREE_OPERAND (arg0, 1);
10646 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10647 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10648 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10649 can be rewritten as (X & (C2 << C1)) != 0. */
10650 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10652 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10653 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10654 return fold_build2_loc (loc, code, type, tem,
10655 fold_convert_loc (loc, itype, arg1));
10657 /* Otherwise, for signed (arithmetic) shifts,
10658 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10659 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10660 else if (!TYPE_UNSIGNED (itype))
10661 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10662 arg000, build_int_cst (itype, 0));
10663 /* Otherwise, for unsigned (logical) shifts,
10664 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10665 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10666 else
10667 return omit_one_operand_loc (loc, type,
10668 code == EQ_EXPR ? integer_one_node
10669 : integer_zero_node,
10670 arg000);
10674 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10675 Similarly for NE_EXPR. */
10676 if (TREE_CODE (arg0) == BIT_AND_EXPR
10677 && TREE_CODE (arg1) == INTEGER_CST
10678 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10680 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10681 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10682 TREE_OPERAND (arg0, 1));
10683 tree dandnotc
10684 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10685 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10686 notc);
10687 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10688 if (integer_nonzerop (dandnotc))
10689 return omit_one_operand_loc (loc, type, rslt, arg0);
10692 /* If this is a comparison of a field, we may be able to simplify it. */
10693 if ((TREE_CODE (arg0) == COMPONENT_REF
10694 || TREE_CODE (arg0) == BIT_FIELD_REF)
10695 /* Handle the constant case even without -O
10696 to make sure the warnings are given. */
10697 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10699 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10700 if (t1)
10701 return t1;
10704 /* Optimize comparisons of strlen vs zero to a compare of the
10705 first character of the string vs zero. To wit,
10706 strlen(ptr) == 0 => *ptr == 0
10707 strlen(ptr) != 0 => *ptr != 0
10708 Other cases should reduce to one of these two (or a constant)
10709 due to the return value of strlen being unsigned. */
10710 if (TREE_CODE (arg0) == CALL_EXPR
10711 && integer_zerop (arg1))
10713 tree fndecl = get_callee_fndecl (arg0);
10715 if (fndecl
10716 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10717 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10718 && call_expr_nargs (arg0) == 1
10719 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10721 tree iref = build_fold_indirect_ref_loc (loc,
10722 CALL_EXPR_ARG (arg0, 0));
10723 return fold_build2_loc (loc, code, type, iref,
10724 build_int_cst (TREE_TYPE (iref), 0));
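The source-level effect of this fold, as a minimal sketch (the helper names are invented for illustration):

#include <assert.h>
#include <string.h>

static int is_empty_strlen (const char *p) { return strlen (p) == 0; }
static int is_empty_deref (const char *p) { return *p == '\0'; }

int
main (void)
{
  assert (is_empty_strlen ("") == is_empty_deref (""));
  assert (is_empty_strlen ("gcc") == is_empty_deref ("gcc"));
  return 0;
}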
10728 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10729 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10730 if (TREE_CODE (arg0) == RSHIFT_EXPR
10731 && integer_zerop (arg1)
10732 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10734 tree arg00 = TREE_OPERAND (arg0, 0);
10735 tree arg01 = TREE_OPERAND (arg0, 1);
10736 tree itype = TREE_TYPE (arg00);
10737 if (wi::eq_p (arg01, element_precision (itype) - 1))
10739 if (TYPE_UNSIGNED (itype))
10741 itype = signed_type_for (itype);
10742 arg00 = fold_convert_loc (loc, itype, arg00);
10744 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10745 type, arg00, build_zero_cst (itype));
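A quick standalone check of the identity for 32-bit X (again assuming GCC's arithmetic right shift of signed values):

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int32_t samples[] = { INT32_MIN, -2, -1, 0, 1, 42, INT32_MAX };
  for (unsigned i = 0; i < sizeof samples / sizeof samples[0]; i++)
    {
      int32_t x = samples[i];
      /* C == precision - 1 isolates the sign bit, so the shifted
         value is nonzero exactly when X is negative.  */
      assert (((x >> 31) != 0) == (x < 0));
      assert (((x >> 31) == 0) == (x >= 0));
    }
  return 0;
}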
10749 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10750 (X & C) == 0 when C is a single bit. */
10751 if (TREE_CODE (arg0) == BIT_AND_EXPR
10752 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10753 && integer_zerop (arg1)
10754 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10756 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10757 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10758 TREE_OPERAND (arg0, 1));
10759 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10760 type, tem,
10761 fold_convert_loc (loc, TREE_TYPE (arg0),
10762 arg1));
10765 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10766 constant C is a power of two, i.e. a single bit. */
10767 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10768 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10769 && integer_zerop (arg1)
10770 && integer_pow2p (TREE_OPERAND (arg0, 1))
10771 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10772 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10774 tree arg00 = TREE_OPERAND (arg0, 0);
10775 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10776 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10779 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10780 when C is a power of two, i.e. a single bit. */
10781 if (TREE_CODE (arg0) == BIT_AND_EXPR
10782 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10783 && integer_zerop (arg1)
10784 && integer_pow2p (TREE_OPERAND (arg0, 1))
10785 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10786 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10788 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10789 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10790 arg000, TREE_OPERAND (arg0, 1));
10791 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10792 tem, build_int_cst (TREE_TYPE (tem), 0));
10795 if (integer_zerop (arg1)
10796 && tree_expr_nonzero_p (arg0))
10798 tree res = constant_boolean_node (code == NE_EXPR, type);
10799 return omit_one_operand_loc (loc, type, res, arg0);
10802 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10803 if (TREE_CODE (arg0) == BIT_AND_EXPR
10804 && TREE_CODE (arg1) == BIT_AND_EXPR)
10806 tree arg00 = TREE_OPERAND (arg0, 0);
10807 tree arg01 = TREE_OPERAND (arg0, 1);
10808 tree arg10 = TREE_OPERAND (arg1, 0);
10809 tree arg11 = TREE_OPERAND (arg1, 1);
10810 tree itype = TREE_TYPE (arg0);
10812 if (operand_equal_p (arg01, arg11, 0))
10813 return fold_build2_loc (loc, code, type,
10814 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10815 fold_build2_loc (loc,
10816 BIT_XOR_EXPR, itype,
10817 arg00, arg10),
10818 arg01),
10819 build_zero_cst (itype));
10821 if (operand_equal_p (arg01, arg10, 0))
10822 return fold_build2_loc (loc, code, type,
10823 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10824 fold_build2_loc (loc,
10825 BIT_XOR_EXPR, itype,
10826 arg00, arg11),
10827 arg01),
10828 build_zero_cst (itype));
10830 if (operand_equal_p (arg00, arg11, 0))
10831 return fold_build2_loc (loc, code, type,
10832 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10833 fold_build2_loc (loc,
10834 BIT_XOR_EXPR, itype,
10835 arg01, arg10),
10836 arg00),
10837 build_zero_cst (itype));
10839 if (operand_equal_p (arg00, arg10, 0))
10840 return fold_build2_loc (loc, code, type,
10841 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10842 fold_build2_loc (loc,
10843 BIT_XOR_EXPR, itype,
10844 arg01, arg11),
10845 arg00),
10846 build_zero_cst (itype));
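The identity behind all four symmetric variants, checked exhaustively over small operands (standalone sketch, not part of this file):

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (uint32_t x = 0; x < 64; x++)
    for (uint32_t y = 0; y < 64; y++)
      for (uint32_t c = 0; c < 64; c++)
        {
          /* Equal masked values <=> no differing bit lies inside
             the mask.  */
          assert (((x & c) == (y & c)) == (((x ^ y) & c) == 0));
          assert (((x & c) != (y & c)) == (((x ^ y) & c) != 0));
        }
  return 0;
}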
10849 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10850 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10852 tree arg00 = TREE_OPERAND (arg0, 0);
10853 tree arg01 = TREE_OPERAND (arg0, 1);
10854 tree arg10 = TREE_OPERAND (arg1, 0);
10855 tree arg11 = TREE_OPERAND (arg1, 1);
10856 tree itype = TREE_TYPE (arg0);
10858 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10859 operand_equal_p guarantees no side-effects so we don't need
10860 to use omit_one_operand on Z. */
10861 if (operand_equal_p (arg01, arg11, 0))
10862 return fold_build2_loc (loc, code, type, arg00,
10863 fold_convert_loc (loc, TREE_TYPE (arg00),
10864 arg10));
10865 if (operand_equal_p (arg01, arg10, 0))
10866 return fold_build2_loc (loc, code, type, arg00,
10867 fold_convert_loc (loc, TREE_TYPE (arg00),
10868 arg11));
10869 if (operand_equal_p (arg00, arg11, 0))
10870 return fold_build2_loc (loc, code, type, arg01,
10871 fold_convert_loc (loc, TREE_TYPE (arg01),
10872 arg10));
10873 if (operand_equal_p (arg00, arg10, 0))
10874 return fold_build2_loc (loc, code, type, arg01,
10875 fold_convert_loc (loc, TREE_TYPE (arg01),
10876 arg11));
10878 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10879 if (TREE_CODE (arg01) == INTEGER_CST
10880 && TREE_CODE (arg11) == INTEGER_CST)
10882 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10883 fold_convert_loc (loc, itype, arg11));
10884 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10885 return fold_build2_loc (loc, code, type, tem,
10886 fold_convert_loc (loc, itype, arg10));
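Both XOR rewrites rest on cancellation and constant merging; a standalone check:

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (uint32_t x = 0; x < 64; x++)
    for (uint32_t y = 0; y < 64; y++)
      for (uint32_t z = 0; z < 64; z++)
        {
          /* Common XOR operands cancel.  */
          assert (((x ^ z) == (y ^ z)) == (x == y));
          /* The two constants merge into one: here C1 = Z, C2 = 21.  */
          assert (((x ^ z) == (y ^ 21)) == ((x ^ (z ^ 21)) == y));
        }
  return 0;
}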
10890 /* Attempt to simplify equality/inequality comparisons of complex
10891 values. Only lower the comparison if the result is known or
10892 can be simplified to a single scalar comparison. */
10893 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10894 || TREE_CODE (arg0) == COMPLEX_CST)
10895 && (TREE_CODE (arg1) == COMPLEX_EXPR
10896 || TREE_CODE (arg1) == COMPLEX_CST))
10898 tree real0, imag0, real1, imag1;
10899 tree rcond, icond;
10901 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10903 real0 = TREE_OPERAND (arg0, 0);
10904 imag0 = TREE_OPERAND (arg0, 1);
10906 else
10908 real0 = TREE_REALPART (arg0);
10909 imag0 = TREE_IMAGPART (arg0);
10912 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10914 real1 = TREE_OPERAND (arg1, 0);
10915 imag1 = TREE_OPERAND (arg1, 1);
10917 else
10919 real1 = TREE_REALPART (arg1);
10920 imag1 = TREE_IMAGPART (arg1);
10923 rcond = fold_binary_loc (loc, code, type, real0, real1);
10924 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10926 if (integer_zerop (rcond))
10928 if (code == EQ_EXPR)
10929 return omit_two_operands_loc (loc, type, boolean_false_node,
10930 imag0, imag1);
10931 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10933 else
10935 if (code == NE_EXPR)
10936 return omit_two_operands_loc (loc, type, boolean_true_node,
10937 imag0, imag1);
10938 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10942 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10943 if (icond && TREE_CODE (icond) == INTEGER_CST)
10945 if (integer_zerop (icond))
10947 if (code == EQ_EXPR)
10948 return omit_two_operands_loc (loc, type, boolean_false_node,
10949 real0, real1);
10950 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10952 else
10954 if (code == NE_EXPR)
10955 return omit_two_operands_loc (loc, type, boolean_true_node,
10956 real0, real1);
10957 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10962 return NULL_TREE;
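The scalar picture of that lowering (a C99 sketch, not GCC internals): a complex equality is the conjunction of its component equalities, so once one component comparison folds to a constant, the result either collapses outright or reduces to the remaining scalar compare.

#include <assert.h>
#include <complex.h>

int
main (void)
{
  double complex x = 3.0 + 4.0 * I;
  double complex y = 3.0 + 5.0 * I;
  assert ((x == y) == (creal (x) == creal (y) && cimag (x) == cimag (y)));
  assert ((x != y) == (creal (x) != creal (y) || cimag (x) != cimag (y)));
  return 0;
}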
10964 case LT_EXPR:
10965 case GT_EXPR:
10966 case LE_EXPR:
10967 case GE_EXPR:
10968 tem = fold_comparison (loc, code, type, op0, op1);
10969 if (tem != NULL_TREE)
10970 return tem;
10972 /* Transform comparisons of the form X +- C CMP X. */
10973 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10974 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10975 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10976 && !HONOR_SNANS (arg0))
10977 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10978 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10980 tree arg01 = TREE_OPERAND (arg0, 1);
10981 enum tree_code code0 = TREE_CODE (arg0);
10982 int is_positive;
10984 if (TREE_CODE (arg01) == REAL_CST)
10985 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10986 else
10987 is_positive = tree_int_cst_sgn (arg01);
10989 /* (X - c) > X becomes false. */
10990 if (code == GT_EXPR
10991 && ((code0 == MINUS_EXPR && is_positive >= 0)
10992 || (code0 == PLUS_EXPR && is_positive <= 0)))
10994 if (TREE_CODE (arg01) == INTEGER_CST
10995 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10996 fold_overflow_warning (("assuming signed overflow does not "
10997 "occur when assuming that (X - c) > X "
10998 "is always false"),
10999 WARN_STRICT_OVERFLOW_ALL);
11000 return constant_boolean_node (0, type);
11003 /* Likewise (X + c) < X becomes false. */
11004 if (code == LT_EXPR
11005 && ((code0 == PLUS_EXPR && is_positive >= 0)
11006 || (code0 == MINUS_EXPR && is_positive <= 0)))
11008 if (TREE_CODE (arg01) == INTEGER_CST
11009 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11010 fold_overflow_warning (("assuming signed overflow does not "
11011 "occur when assuming that "
11012 "(X + c) < X is always false"),
11013 WARN_STRICT_OVERFLOW_ALL);
11014 return constant_boolean_node (0, type);
11017 /* Convert (X - c) <= X to true. */
11018 if (!HONOR_NANS (arg1)
11019 && code == LE_EXPR
11020 && ((code0 == MINUS_EXPR && is_positive >= 0)
11021 || (code0 == PLUS_EXPR && is_positive <= 0)))
11023 if (TREE_CODE (arg01) == INTEGER_CST
11024 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11025 fold_overflow_warning (("assuming signed overflow does not "
11026 "occur when assuming that "
11027 "(X - c) <= X is always true"),
11028 WARN_STRICT_OVERFLOW_ALL);
11029 return constant_boolean_node (1, type);
11032 /* Convert (X + c) >= X to true. */
11033 if (!HONOR_NANS (arg1)
11034 && code == GE_EXPR
11035 && ((code0 == PLUS_EXPR && is_positive >= 0)
11036 || (code0 == MINUS_EXPR && is_positive <= 0)))
11038 if (TREE_CODE (arg01) == INTEGER_CST
11039 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11040 fold_overflow_warning (("assuming signed overflow does not "
11041 "occur when assuming that "
11042 "(X + c) >= X is always true"),
11043 WARN_STRICT_OVERFLOW_ALL);
11044 return constant_boolean_node (1, type);
11047 if (TREE_CODE (arg01) == INTEGER_CST)
11049 /* Convert X + c > X and X - c < X to true for integers. */
11050 if (code == GT_EXPR
11051 && ((code0 == PLUS_EXPR && is_positive > 0)
11052 || (code0 == MINUS_EXPR && is_positive < 0)))
11054 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11055 fold_overflow_warning (("assuming signed overflow does "
11056 "not occur when assuming that "
11057 "(X + c) > X is always true"),
11058 WARN_STRICT_OVERFLOW_ALL);
11059 return constant_boolean_node (1, type);
11062 if (code == LT_EXPR
11063 && ((code0 == MINUS_EXPR && is_positive > 0)
11064 || (code0 == PLUS_EXPR && is_positive < 0)))
11066 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11067 fold_overflow_warning (("assuming signed overflow does "
11068 "not occur when assuming that "
11069 "(X - c) < X is always true"),
11070 WARN_STRICT_OVERFLOW_ALL);
11071 return constant_boolean_node (1, type);
11074 /* Convert X + c <= X and X - c >= X to false for integers. */
11075 if (code == LE_EXPR
11076 && ((code0 == PLUS_EXPR && is_positive > 0)
11077 || (code0 == MINUS_EXPR && is_positive < 0)))
11079 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11080 fold_overflow_warning (("assuming signed overflow does "
11081 "not occur when assuming that "
11082 "(X + c) <= X is always false"),
11083 WARN_STRICT_OVERFLOW_ALL);
11084 return constant_boolean_node (0, type);
11087 if (code == GE_EXPR
11088 && ((code0 == MINUS_EXPR && is_positive > 0)
11089 || (code0 == PLUS_EXPR && is_positive < 0)))
11091 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11092 fold_overflow_warning (("assuming signed overflow does "
11093 "not occur when assuming that "
11094 "(X - c) >= X is always false"),
11095 WARN_STRICT_OVERFLOW_ALL);
11096 return constant_boolean_node (0, type);
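Every branch above checks TYPE_OVERFLOW_UNDEFINED because with wrapping semantics the "always true/false" answers fail at the boundary. A standalone counterexample using unsigned (well-defined wrapping) arithmetic:

#include <assert.h>
#include <limits.h>

int
main (void)
{
  unsigned x = UINT_MAX;
  assert (!(x + 1 > x));   /* X + 1 wraps to 0, so the fold would lie */
  unsigned y = 42;
  assert (y + 1 > y);      /* holds away from the wrap point */
  return 0;
}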
11101 /* If we are comparing an ABS_EXPR with a constant, we can
11102 convert all the cases into explicit comparisons, but they may
11103 well not be faster than doing the ABS and one comparison.
11104 But ABS (X) <= C is a range comparison, which becomes a subtraction
11105 and a comparison, and is probably faster. */
11106 if (code == LE_EXPR
11107 && TREE_CODE (arg1) == INTEGER_CST
11108 && TREE_CODE (arg0) == ABS_EXPR
11109 && ! TREE_SIDE_EFFECTS (arg0)
11110 && (0 != (tem = negate_expr (arg1)))
11111 && TREE_CODE (tem) == INTEGER_CST
11112 && !TREE_OVERFLOW (tem))
11113 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11114 build2 (GE_EXPR, type,
11115 TREE_OPERAND (arg0, 0), tem),
11116 build2 (LE_EXPR, type,
11117 TREE_OPERAND (arg0, 0), arg1));
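The range form this fold produces, checked numerically (standalone sketch; C = 37 is arbitrary and must be negatable without overflow, as the guard above requires):

#include <assert.h>
#include <stdlib.h>

int
main (void)
{
  for (int x = -1000; x <= 1000; x++)
    assert ((abs (x) <= 37) == (x >= -37 && x <= 37));
  return 0;
}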
11119 /* Convert ABS_EXPR<x> >= 0 to true. */
11120 strict_overflow_p = false;
11121 if (code == GE_EXPR
11122 && (integer_zerop (arg1)
11123 || (! HONOR_NANS (arg0)
11124 && real_zerop (arg1)))
11125 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11127 if (strict_overflow_p)
11128 fold_overflow_warning (("assuming signed overflow does not occur "
11129 "when simplifying comparison of "
11130 "absolute value and zero"),
11131 WARN_STRICT_OVERFLOW_CONDITIONAL);
11132 return omit_one_operand_loc (loc, type,
11133 constant_boolean_node (true, type),
11134 arg0);
11137 /* Convert ABS_EXPR<x> < 0 to false. */
11138 strict_overflow_p = false;
11139 if (code == LT_EXPR
11140 && (integer_zerop (arg1) || real_zerop (arg1))
11141 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11143 if (strict_overflow_p)
11144 fold_overflow_warning (("assuming signed overflow does not occur "
11145 "when simplifying comparison of "
11146 "absolute value and zero"),
11147 WARN_STRICT_OVERFLOW_CONDITIONAL);
11148 return omit_one_operand_loc (loc, type,
11149 constant_boolean_node (false, type),
11150 arg0);
11153 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11154 and similarly for >= into !=. */
11155 if ((code == LT_EXPR || code == GE_EXPR)
11156 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11157 && TREE_CODE (arg1) == LSHIFT_EXPR
11158 && integer_onep (TREE_OPERAND (arg1, 0)))
11159 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11160 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11161 TREE_OPERAND (arg1, 1)),
11162 build_zero_cst (TREE_TYPE (arg0)));
11164 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11165 otherwise Y might be >= # of bits in X's type and thus e.g.
11166 (unsigned char) (1 << Y) for Y 15 might be 0.
11167 If the cast is widening, then 1 << Y should have unsigned type,
11168 otherwise if Y is number of bits in the signed shift type minus 1,
11169 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11170 31 might be 0xffffffff80000000. */
11171 if ((code == LT_EXPR || code == GE_EXPR)
11172 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11173 && CONVERT_EXPR_P (arg1)
11174 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11175 && (element_precision (TREE_TYPE (arg1))
11176 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11177 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11178 || (element_precision (TREE_TYPE (arg1))
11179 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11180 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11182 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11183 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11184 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11185 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11186 build_zero_cst (TREE_TYPE (arg0)));
11189 return NULL_TREE;
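A standalone check of the shift-based rewrite that both transformations above build on: for unsigned X, X < 2**Y says every bit at or above position Y is clear.

#include <assert.h>

int
main (void)
{
  for (unsigned x = 0; x < 4096; x++)
    for (unsigned y = 0; y < 12; y++)
      {
        assert ((x < (1u << y)) == ((x >> y) == 0));
        assert ((x >= (1u << y)) == ((x >> y) != 0));
      }
  return 0;
}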
11191 case UNORDERED_EXPR:
11192 case ORDERED_EXPR:
11193 case UNLT_EXPR:
11194 case UNLE_EXPR:
11195 case UNGT_EXPR:
11196 case UNGE_EXPR:
11197 case UNEQ_EXPR:
11198 case LTGT_EXPR:
11199 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11201 tree targ0 = strip_float_extensions (arg0);
11202 tree targ1 = strip_float_extensions (arg1);
11203 tree newtype = TREE_TYPE (targ0);
11205 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11206 newtype = TREE_TYPE (targ1);
11208 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11209 return fold_build2_loc (loc, code, type,
11210 fold_convert_loc (loc, newtype, targ0),
11211 fold_convert_loc (loc, newtype, targ1));
11214 return NULL_TREE;
11216 case COMPOUND_EXPR:
11217 /* When pedantic, a compound expression can be neither an lvalue
11218 nor an integer constant expression. */
11219 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11220 return NULL_TREE;
11221 /* Don't let (0, 0) be a null pointer constant. */
11222 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11223 : fold_convert_loc (loc, type, arg1);
11224 return pedantic_non_lvalue_loc (loc, tem);
11226 case ASSERT_EXPR:
11227 /* An ASSERT_EXPR should never be passed to fold_binary. */
11228 gcc_unreachable ();
11230 default:
11231 return NULL_TREE;
11232 } /* switch (code) */
11235 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11236 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11237 of GOTO_EXPR. */
11239 static tree
11240 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11242 switch (TREE_CODE (*tp))
11244 case LABEL_EXPR:
11245 return *tp;
11247 case GOTO_EXPR:
11248 *walk_subtrees = 0;
11250 /* ... fall through ... */
11252 default:
11253 return NULL_TREE;
11257 /* Return whether the sub-tree ST contains a label which is accessible from
11258 outside the sub-tree. */
11260 static bool
11261 contains_label_p (tree st)
11263 return
11264 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11267 /* Fold a ternary expression of code CODE and type TYPE with operands
11268 OP0, OP1, and OP2. Return the folded expression if folding is
11269 successful. Otherwise, return NULL_TREE. */
11271 tree
11272 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11273 tree op0, tree op1, tree op2)
11275 tree tem;
11276 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11277 enum tree_code_class kind = TREE_CODE_CLASS (code);
11279 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11280 && TREE_CODE_LENGTH (code) == 3);
11282 /* If this is a commutative operation, and OP0 is a constant, move it
11283 to OP1 to reduce the number of tests below. */
11284 if (commutative_ternary_tree_code (code)
11285 && tree_swap_operands_p (op0, op1, true))
11286 return fold_build3_loc (loc, code, type, op1, op0, op2);
11288 tem = generic_simplify (loc, code, type, op0, op1, op2);
11289 if (tem)
11290 return tem;
11292 /* Strip any conversions that don't change the mode. This is safe
11293 for every expression, except for a comparison expression because
11294 its signedness is derived from its operands. So, in the latter
11295 case, only strip conversions that don't change the signedness.
11297 Note that this is done as an internal manipulation within the
11298 constant folder, in order to find the simplest representation of
11299 the arguments so that their form can be studied. In any cases,
11300 the appropriate type conversions should be put back in the tree
11301 that will get out of the constant folder. */
11302 if (op0)
11304 arg0 = op0;
11305 STRIP_NOPS (arg0);
11308 if (op1)
11310 arg1 = op1;
11311 STRIP_NOPS (arg1);
11314 if (op2)
11316 arg2 = op2;
11317 STRIP_NOPS (arg2);
11320 switch (code)
11322 case COMPONENT_REF:
11323 if (TREE_CODE (arg0) == CONSTRUCTOR
11324 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11326 unsigned HOST_WIDE_INT idx;
11327 tree field, value;
11328 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11329 if (field == arg1)
11330 return value;
11332 return NULL_TREE;
11334 case COND_EXPR:
11335 case VEC_COND_EXPR:
11336 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11337 so all simple results must be passed through pedantic_non_lvalue. */
11338 if (TREE_CODE (arg0) == INTEGER_CST)
11340 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11341 tem = integer_zerop (arg0) ? op2 : op1;
11342 /* Only optimize constant conditions when the selected branch
11343 has the same type as the COND_EXPR. This avoids optimizing
11344 away "c ? x : throw", where the throw has a void type.
11345 Avoid throwing away the operand which contains a label. */
11346 if ((!TREE_SIDE_EFFECTS (unused_op)
11347 || !contains_label_p (unused_op))
11348 && (! VOID_TYPE_P (TREE_TYPE (tem))
11349 || VOID_TYPE_P (type)))
11350 return pedantic_non_lvalue_loc (loc, tem);
11351 return NULL_TREE;
11353 else if (TREE_CODE (arg0) == VECTOR_CST)
11355 if ((TREE_CODE (arg1) == VECTOR_CST
11356 || TREE_CODE (arg1) == CONSTRUCTOR)
11357 && (TREE_CODE (arg2) == VECTOR_CST
11358 || TREE_CODE (arg2) == CONSTRUCTOR))
11360 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11361 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11362 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11363 for (i = 0; i < nelts; i++)
11365 tree val = VECTOR_CST_ELT (arg0, i);
11366 if (integer_all_onesp (val))
11367 sel[i] = i;
11368 else if (integer_zerop (val))
11369 sel[i] = nelts + i;
11370 else /* Currently unreachable. */
11371 return NULL_TREE;
11373 tree t = fold_vec_perm (type, arg1, arg2, sel);
11374 if (t != NULL_TREE)
11375 return t;
11379 /* If we have A op B ? A : C, we may be able to convert this to a
11380 simpler expression, depending on the operation and the values
11381 of B and C. Signed zeros prevent all of these transformations,
11382 for reasons given above each one.
11384 Also try swapping the arguments and inverting the conditional. */
11385 if (COMPARISON_CLASS_P (arg0)
11386 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11387 arg1, TREE_OPERAND (arg0, 1))
11388 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11390 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11391 if (tem)
11392 return tem;
11395 if (COMPARISON_CLASS_P (arg0)
11396 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11397 op2,
11398 TREE_OPERAND (arg0, 1))
11399 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11401 location_t loc0 = expr_location_or (arg0, loc);
11402 tem = fold_invert_truthvalue (loc0, arg0);
11403 if (tem && COMPARISON_CLASS_P (tem))
11405 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11406 if (tem)
11407 return tem;
11411 /* If the second operand is simpler than the third, swap them
11412 since that produces better jump optimization results. */
11413 if (truth_value_p (TREE_CODE (arg0))
11414 && tree_swap_operands_p (op1, op2, false))
11416 location_t loc0 = expr_location_or (arg0, loc);
11417 /* See if this can be inverted. If it can't, possibly because
11418 it was a floating-point inequality comparison, don't do
11419 anything. */
11420 tem = fold_invert_truthvalue (loc0, arg0);
11421 if (tem)
11422 return fold_build3_loc (loc, code, type, tem, op2, op1);
11425 /* Convert A ? 1 : 0 to simply A. */
11426 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11427 : (integer_onep (op1)
11428 && !VECTOR_TYPE_P (type)))
11429 && integer_zerop (op2)
11430 /* If we try to convert OP0 to our type, the
11431 call to fold will try to move the conversion inside
11432 a COND, which will recurse. In that case, the COND_EXPR
11433 is probably the best choice, so leave it alone. */
11434 && type == TREE_TYPE (arg0))
11435 return pedantic_non_lvalue_loc (loc, arg0);
11437 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11438 over COND_EXPR in cases such as floating point comparisons. */
11439 if (integer_zerop (op1)
11440 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11441 : (integer_onep (op2)
11442 && !VECTOR_TYPE_P (type)))
11443 && truth_value_p (TREE_CODE (arg0)))
11444 return pedantic_non_lvalue_loc (loc,
11445 fold_convert_loc (loc, type,
11446 invert_truthvalue_loc (loc,
11447 arg0)));
11449 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11450 if (TREE_CODE (arg0) == LT_EXPR
11451 && integer_zerop (TREE_OPERAND (arg0, 1))
11452 && integer_zerop (op2)
11453 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11455 /* sign_bit_p looks through both zero and sign extensions,
11456 but for this optimization only sign extensions are
11457 usable. */
11458 tree tem2 = TREE_OPERAND (arg0, 0);
11459 while (tem != tem2)
11461 if (TREE_CODE (tem2) != NOP_EXPR
11462 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11464 tem = NULL_TREE;
11465 break;
11467 tem2 = TREE_OPERAND (tem2, 0);
11469 /* sign_bit_p only checks ARG1 bits within A's precision.
11470 If <sign bit of A> has wider type than A, bits outside
11471 of A's precision in <sign bit of A> need to be checked.
11472 If they are all 0, this optimization needs to be done
11473 in unsigned A's type; if they are all 1, in signed A's type;
11474 otherwise this can't be done. */
11475 if (tem
11476 && TYPE_PRECISION (TREE_TYPE (tem))
11477 < TYPE_PRECISION (TREE_TYPE (arg1))
11478 && TYPE_PRECISION (TREE_TYPE (tem))
11479 < TYPE_PRECISION (type))
11481 int inner_width, outer_width;
11482 tree tem_type;
11484 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11485 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11486 if (outer_width > TYPE_PRECISION (type))
11487 outer_width = TYPE_PRECISION (type);
11489 wide_int mask = wi::shifted_mask
11490 (inner_width, outer_width - inner_width, false,
11491 TYPE_PRECISION (TREE_TYPE (arg1)));
11493 wide_int common = mask & arg1;
11494 if (common == mask)
11496 tem_type = signed_type_for (TREE_TYPE (tem));
11497 tem = fold_convert_loc (loc, tem_type, tem);
11499 else if (common == 0)
11501 tem_type = unsigned_type_for (TREE_TYPE (tem));
11502 tem = fold_convert_loc (loc, tem_type, tem);
11504 else
11505 tem = NULL;
11508 if (tem)
11509 return
11510 fold_convert_loc (loc, type,
11511 fold_build2_loc (loc, BIT_AND_EXPR,
11512 TREE_TYPE (tem), tem,
11513 fold_convert_loc (loc,
11514 TREE_TYPE (tem),
11515 arg1)));
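The core identity of this COND_EXPR fold, in standalone form: the condition A < 0 and the mask A & <sign bit> test the same bit.

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int32_t samples[] = { INT32_MIN, -7, -1, 0, 1, 7, INT32_MAX };
  uint32_t sign = UINT32_C (1) << 31;
  for (unsigned i = 0; i < sizeof samples / sizeof samples[0]; i++)
    {
      int32_t a = samples[i];
      assert ((a < 0 ? sign : 0u) == ((uint32_t) a & sign));
    }
  return 0;
}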
11518 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11519 already handled above. */
11520 if (TREE_CODE (arg0) == BIT_AND_EXPR
11521 && integer_onep (TREE_OPERAND (arg0, 1))
11522 && integer_zerop (op2)
11523 && integer_pow2p (arg1))
11525 tree tem = TREE_OPERAND (arg0, 0);
11526 STRIP_NOPS (tem);
11527 if (TREE_CODE (tem) == RSHIFT_EXPR
11528 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11529 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11530 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11531 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11532 TREE_OPERAND (tem, 0), arg1);
11535 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11536 is probably obsolete because the first operand should be a
11537 truth value (that's why we have the two cases above), but let's
11538 leave it in until we can confirm this for all front-ends. */
11539 if (integer_zerop (op2)
11540 && TREE_CODE (arg0) == NE_EXPR
11541 && integer_zerop (TREE_OPERAND (arg0, 1))
11542 && integer_pow2p (arg1)
11543 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11544 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11545 arg1, OEP_ONLY_CONST))
11546 return pedantic_non_lvalue_loc (loc,
11547 fold_convert_loc (loc, type,
11548 TREE_OPERAND (arg0, 0)));
11550 /* Disable the transformations below for vectors, since
11551 fold_binary_op_with_conditional_arg may undo them immediately,
11552 yielding an infinite loop. */
11553 if (code == VEC_COND_EXPR)
11554 return NULL_TREE;
11556 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11557 if (integer_zerop (op2)
11558 && truth_value_p (TREE_CODE (arg0))
11559 && truth_value_p (TREE_CODE (arg1))
11560 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11561 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11562 : TRUTH_ANDIF_EXPR,
11563 type, fold_convert_loc (loc, type, arg0), arg1);
11565 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11566 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
11567 && truth_value_p (TREE_CODE (arg0))
11568 && truth_value_p (TREE_CODE (arg1))
11569 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11571 location_t loc0 = expr_location_or (arg0, loc);
11572 /* Only perform transformation if ARG0 is easily inverted. */
11573 tem = fold_invert_truthvalue (loc0, arg0);
11574 if (tem)
11575 return fold_build2_loc (loc, code == VEC_COND_EXPR
11576 ? BIT_IOR_EXPR
11577 : TRUTH_ORIF_EXPR,
11578 type, fold_convert_loc (loc, type, tem),
11579 arg1);
11582 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11583 if (integer_zerop (arg1)
11584 && truth_value_p (TREE_CODE (arg0))
11585 && truth_value_p (TREE_CODE (op2))
11586 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11588 location_t loc0 = expr_location_or (arg0, loc);
11589 /* Only perform transformation if ARG0 is easily inverted. */
11590 tem = fold_invert_truthvalue (loc0, arg0);
11591 if (tem)
11592 return fold_build2_loc (loc, code == VEC_COND_EXPR
11593 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11594 type, fold_convert_loc (loc, type, tem),
11595 op2);
11598 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11599 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
11600 && truth_value_p (TREE_CODE (arg0))
11601 && truth_value_p (TREE_CODE (op2))
11602 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11603 return fold_build2_loc (loc, code == VEC_COND_EXPR
11604 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11605 type, fold_convert_loc (loc, type, arg0), op2);
11607 return NULL_TREE;
11609 case CALL_EXPR:
11610 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11611 of fold_ternary on them. */
11612 gcc_unreachable ();
11614 case BIT_FIELD_REF:
11615 if ((TREE_CODE (arg0) == VECTOR_CST
11616 || (TREE_CODE (arg0) == CONSTRUCTOR
11617 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11618 && (type == TREE_TYPE (TREE_TYPE (arg0))
11619 || (TREE_CODE (type) == VECTOR_TYPE
11620 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11622 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11623 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11624 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11625 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11627 if (n != 0
11628 && (idx % width) == 0
11629 && (n % width) == 0
11630 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11632 idx = idx / width;
11633 n = n / width;
11635 if (TREE_CODE (arg0) == VECTOR_CST)
11637 if (n == 1)
11638 return VECTOR_CST_ELT (arg0, idx);
11640 tree *vals = XALLOCAVEC (tree, n);
11641 for (unsigned i = 0; i < n; ++i)
11642 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11643 return build_vector (type, vals);
11646 /* Constructor elements can be subvectors. */
11647 unsigned HOST_WIDE_INT k = 1;
11648 if (CONSTRUCTOR_NELTS (arg0) != 0)
11650 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11651 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11652 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11655 /* We keep an exact subset of the constructor elements. */
11656 if ((idx % k) == 0 && (n % k) == 0)
11658 if (CONSTRUCTOR_NELTS (arg0) == 0)
11659 return build_constructor (type, NULL);
11660 idx /= k;
11661 n /= k;
11662 if (n == 1)
11664 if (idx < CONSTRUCTOR_NELTS (arg0))
11665 return CONSTRUCTOR_ELT (arg0, idx)->value;
11666 return build_zero_cst (type);
11669 vec<constructor_elt, va_gc> *vals;
11670 vec_alloc (vals, n);
11671 for (unsigned i = 0;
11672 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11673 ++i)
11674 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11675 CONSTRUCTOR_ELT
11676 (arg0, idx + i)->value);
11677 return build_constructor (type, vals);
11679 /* The bitfield references a single constructor element. */
11680 else if (idx + n <= (idx / k + 1) * k)
11682 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11683 return build_zero_cst (type);
11684 else if (n == k)
11685 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11686 else
11687 return fold_build3_loc (loc, code, type,
11688 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11689 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11694 /* A bit-field-ref that referenced the full argument can be stripped. */
11695 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11696 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11697 && integer_zerop (op2))
11698 return fold_convert_loc (loc, type, arg0);
11700 /* On constants we can use native encode/interpret to constant
11701 fold (nearly) all BIT_FIELD_REFs. */
11702 if (CONSTANT_CLASS_P (arg0)
11703 && can_native_interpret_type_p (type)
11704 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11705 /* This limitation should not be necessary, we just need to
11706 round this up to mode size. */
11707 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11708 /* Need bit-shifting of the buffer to relax the following. */
11709 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11711 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11712 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11713 unsigned HOST_WIDE_INT clen;
11714 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11715 /* ??? We cannot tell native_encode_expr to start at
11716 some random byte only. So limit us to a reasonable amount
11717 of work. */
11718 if (clen <= 4096)
11720 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11721 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11722 if (len > 0
11723 && len * BITS_PER_UNIT >= bitpos + bitsize)
11725 tree v = native_interpret_expr (type,
11726 b + bitpos / BITS_PER_UNIT,
11727 bitsize / BITS_PER_UNIT);
11728 if (v)
11729 return v;
11734 return NULL_TREE;
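A byte-level analogue of the native_encode_expr / native_interpret_expr round trip used above (a sketch only; the real routines honor target byte order and handle many more types):

#include <assert.h>
#include <stdint.h>
#include <string.h>

int
main (void)
{
  uint64_t v = UINT64_C (0x1122334455667788);
  unsigned char buf[sizeof v];
  memcpy (buf, &v, sizeof v);               /* "native encode" */

  uint16_t slice;
  memcpy (&slice, buf + 2, sizeof slice);   /* byte-aligned BIT_FIELD_REF */

  unsigned one = 1;
  if (*(unsigned char *) &one == 1)         /* little-endian host only */
    assert (slice == (uint16_t) (v >> 16));
  return 0;
}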
11736 case FMA_EXPR:
11737 /* For integers we can decompose the FMA if possible. */
11738 if (TREE_CODE (arg0) == INTEGER_CST
11739 && TREE_CODE (arg1) == INTEGER_CST)
11740 return fold_build2_loc (loc, PLUS_EXPR, type,
11741 const_binop (MULT_EXPR, arg0, arg1), arg2);
11742 if (integer_zerop (arg2))
11743 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11745 return fold_fma (loc, type, arg0, arg1, arg2);
11747 case VEC_PERM_EXPR:
11748 if (TREE_CODE (arg2) == VECTOR_CST)
11750 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11751 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11752 unsigned char *sel2 = sel + nelts;
11753 bool need_mask_canon = false;
11754 bool need_mask_canon2 = false;
11755 bool all_in_vec0 = true;
11756 bool all_in_vec1 = true;
11757 bool maybe_identity = true;
11758 bool single_arg = (op0 == op1);
11759 bool changed = false;
11761 mask2 = 2 * nelts - 1;
11762 mask = single_arg ? (nelts - 1) : mask2;
11763 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11764 for (i = 0; i < nelts; i++)
11766 tree val = VECTOR_CST_ELT (arg2, i);
11767 if (TREE_CODE (val) != INTEGER_CST)
11768 return NULL_TREE;
11770 /* Make sure that the perm value is in an acceptable
11771 range. */
11772 wide_int t = val;
11773 need_mask_canon |= wi::gtu_p (t, mask);
11774 need_mask_canon2 |= wi::gtu_p (t, mask2);
11775 sel[i] = t.to_uhwi () & mask;
11776 sel2[i] = t.to_uhwi () & mask2;
11778 if (sel[i] < nelts)
11779 all_in_vec1 = false;
11780 else
11781 all_in_vec0 = false;
11783 if ((sel[i] & (nelts-1)) != i)
11784 maybe_identity = false;
11787 if (maybe_identity)
11789 if (all_in_vec0)
11790 return op0;
11791 if (all_in_vec1)
11792 return op1;
11795 if (all_in_vec0)
11796 op1 = op0;
11797 else if (all_in_vec1)
11799 op0 = op1;
11800 for (i = 0; i < nelts; i++)
11801 sel[i] -= nelts;
11802 need_mask_canon = true;
11805 if ((TREE_CODE (op0) == VECTOR_CST
11806 || TREE_CODE (op0) == CONSTRUCTOR)
11807 && (TREE_CODE (op1) == VECTOR_CST
11808 || TREE_CODE (op1) == CONSTRUCTOR))
11810 tree t = fold_vec_perm (type, op0, op1, sel);
11811 if (t != NULL_TREE)
11812 return t;
11815 if (op0 == op1 && !single_arg)
11816 changed = true;
11818 /* Some targets are deficient and fail to expand a single
11819 argument permutation while still allowing an equivalent
11820 2-argument version. */
11821 if (need_mask_canon && arg2 == op2
11822 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11823 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11825 need_mask_canon = need_mask_canon2;
11826 sel = sel2;
11829 if (need_mask_canon && arg2 == op2)
11831 tree *tsel = XALLOCAVEC (tree, nelts);
11832 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11833 for (i = 0; i < nelts; i++)
11834 tsel[i] = build_int_cst (eltype, sel[i]);
11835 op2 = build_vector (TREE_TYPE (arg2), tsel);
11836 changed = true;
11839 if (changed)
11840 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11842 return NULL_TREE;
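A scalar model of the VEC_PERM_EXPR semantics being canonicalized above: each selector element indexes the concatenation of the two input vectors (sketch, not GCC API):

#include <assert.h>

int
main (void)
{
  enum { N = 4 };
  int v0[N] = { 10, 11, 12, 13 };
  int v1[N] = { 20, 21, 22, 23 };
  unsigned sel[N] = { 0, 5, 2, 7 };   /* indices into the 2*N pool */
  int res[N];

  for (unsigned i = 0; i < N; i++)
    res[i] = sel[i] < N ? v0[sel[i]] : v1[sel[i] - N];

  assert (res[0] == 10 && res[1] == 21 && res[2] == 12 && res[3] == 23);
  return 0;
}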
11844 default:
11845 return NULL_TREE;
11846 } /* switch (code) */
11849 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11850 of an array (or vector). */
11852 tree
11853 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11855 tree index_type = NULL_TREE;
11856 offset_int low_bound = 0;
11858 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11860 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11861 if (domain_type && TYPE_MIN_VALUE (domain_type))
11863 /* Static constructors for variably sized objects make no sense. */
11864 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11865 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11866 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11870 if (index_type)
11871 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11872 TYPE_SIGN (index_type));
11874 offset_int index = low_bound - 1;
11875 if (index_type)
11876 index = wi::ext (index, TYPE_PRECISION (index_type),
11877 TYPE_SIGN (index_type));
11879 offset_int max_index;
11880 unsigned HOST_WIDE_INT cnt;
11881 tree cfield, cval;
11883 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11885 /* Array constructor might explicitly set index, or specify a range,
11886 or leave index NULL meaning that it is the next index after the
11887 previous one. */
11888 if (cfield)
11890 if (TREE_CODE (cfield) == INTEGER_CST)
11891 max_index = index = wi::to_offset (cfield);
11892 else
11894 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11895 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11896 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11899 else
11901 index += 1;
11902 if (index_type)
11903 index = wi::ext (index, TYPE_PRECISION (index_type),
11904 TYPE_SIGN (index_type));
11905 max_index = index;
11908 /* Do we have a match? */
11909 if (wi::cmpu (access_index, index) >= 0
11910 && wi::cmpu (access_index, max_index) <= 0)
11911 return cval;
11913 return NULL_TREE;
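The three indexing forms decoded by the loop above map directly onto C initializer syntax; a small GNU C sketch ([6 ... 8] is the GNU range-designator extension, i.e. RANGE_EXPR):

#include <assert.h>

int
main (void)
{
  int a[10] = { [2] = 5, 7, [6 ... 8] = 9 };

  assert (a[2] == 5);                /* explicit index */
  assert (a[3] == 7);                /* no designator: previous + 1 */
  assert (a[6] == 9 && a[8] == 9);   /* range covers 6, 7 and 8 */
  assert (a[0] == 0);                /* unmentioned elements are zero */
  return 0;
}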
11916 /* Perform constant folding and related simplification of EXPR.
11917 The related simplifications include x*1 => x, x*0 => 0, etc.,
11918 and application of the associative law.
11919 NOP_EXPR conversions may be removed freely (as long as we
11920 are careful not to change the type of the overall expression).
11921 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11922 but we can constant-fold them if they have constant operands. */
11924 #ifdef ENABLE_FOLD_CHECKING
11925 # define fold(x) fold_1 (x)
11926 static tree fold_1 (tree);
11927 static
11928 #endif
11929 tree
11930 fold (tree expr)
11932 const tree t = expr;
11933 enum tree_code code = TREE_CODE (t);
11934 enum tree_code_class kind = TREE_CODE_CLASS (code);
11935 tree tem;
11936 location_t loc = EXPR_LOCATION (expr);
11938 /* Return right away if a constant. */
11939 if (kind == tcc_constant)
11940 return t;
11942 /* CALL_EXPR-like objects with variable numbers of operands are
11943 treated specially. */
11944 if (kind == tcc_vl_exp)
11946 if (code == CALL_EXPR)
11948 tem = fold_call_expr (loc, expr, false);
11949 return tem ? tem : expr;
11951 return expr;
11954 if (IS_EXPR_CODE_CLASS (kind))
11956 tree type = TREE_TYPE (t);
11957 tree op0, op1, op2;
11959 switch (TREE_CODE_LENGTH (code))
11961 case 1:
11962 op0 = TREE_OPERAND (t, 0);
11963 tem = fold_unary_loc (loc, code, type, op0);
11964 return tem ? tem : expr;
11965 case 2:
11966 op0 = TREE_OPERAND (t, 0);
11967 op1 = TREE_OPERAND (t, 1);
11968 tem = fold_binary_loc (loc, code, type, op0, op1);
11969 return tem ? tem : expr;
11970 case 3:
11971 op0 = TREE_OPERAND (t, 0);
11972 op1 = TREE_OPERAND (t, 1);
11973 op2 = TREE_OPERAND (t, 2);
11974 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11975 return tem ? tem : expr;
11976 default:
11977 break;
11981 switch (code)
11983 case ARRAY_REF:
11985 tree op0 = TREE_OPERAND (t, 0);
11986 tree op1 = TREE_OPERAND (t, 1);
11988 if (TREE_CODE (op1) == INTEGER_CST
11989 && TREE_CODE (op0) == CONSTRUCTOR
11990 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11992 tree val = get_array_ctor_element_at_index (op0,
11993 wi::to_offset (op1));
11994 if (val)
11995 return val;
11998 return t;
12001 /* Return a VECTOR_CST if possible. */
12002 case CONSTRUCTOR:
12004 tree type = TREE_TYPE (t);
12005 if (TREE_CODE (type) != VECTOR_TYPE)
12006 return t;
12008 unsigned i;
12009 tree val;
12010 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12011 if (! CONSTANT_CLASS_P (val))
12012 return t;
12014 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12017 case CONST_DECL:
12018 return fold (DECL_INITIAL (t));
12020 default:
12021 return t;
12022 } /* switch (code) */
12025 #ifdef ENABLE_FOLD_CHECKING
12026 #undef fold
12028 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12029 hash_table<nofree_ptr_hash<const tree_node> > *);
12030 static void fold_check_failed (const_tree, const_tree);
12031 void print_fold_checksum (const_tree);
12033 /* When --enable-checking=fold, compute a digest of expr before
12034 and after the actual fold call to verify that fold did not
12035 accidentally change the original expr. */
12037 tree
12038 fold (tree expr)
12040 tree ret;
12041 struct md5_ctx ctx;
12042 unsigned char checksum_before[16], checksum_after[16];
12043 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12045 md5_init_ctx (&ctx);
12046 fold_checksum_tree (expr, &ctx, &ht);
12047 md5_finish_ctx (&ctx, checksum_before);
12048 ht.empty ();
12050 ret = fold_1 (expr);
12052 md5_init_ctx (&ctx);
12053 fold_checksum_tree (expr, &ctx, &ht);
12054 md5_finish_ctx (&ctx, checksum_after);
12056 if (memcmp (checksum_before, checksum_after, 16))
12057 fold_check_failed (expr, ret);
12059 return ret;
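The checking discipline above in miniature (a hypothetical standalone sketch, with FNV-1a standing in for md5): hash the input, run the supposedly pure transformation, hash again, and abort on any difference.

#include <assert.h>
#include <stddef.h>
#include <string.h>

static unsigned
digest (const unsigned char *p, size_t n)
{
  unsigned h = 2166136261u;          /* FNV-1a */
  for (size_t i = 0; i < n; i++)
    h = (h ^ p[i]) * 16777619u;
  return h;
}

static int
pure_transform (const unsigned char *buf)
{
  return buf[0] + 1;                 /* must not write through BUF */
}

int
main (void)
{
  unsigned char buf[16];
  memset (buf, 7, sizeof buf);
  unsigned before = digest (buf, sizeof buf);
  int r = pure_transform (buf);
  unsigned after = digest (buf, sizeof buf);
  assert (before == after);          /* else: original changed by fold */
  (void) r;
  return 0;
}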
12062 void
12063 print_fold_checksum (const_tree expr)
12065 struct md5_ctx ctx;
12066 unsigned char checksum[16], cnt;
12067 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12069 md5_init_ctx (&ctx);
12070 fold_checksum_tree (expr, &ctx, &ht);
12071 md5_finish_ctx (&ctx, checksum);
12072 for (cnt = 0; cnt < 16; ++cnt)
12073 fprintf (stderr, "%02x", checksum[cnt]);
12074 putc ('\n', stderr);
12077 static void
12078 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12080 internal_error ("fold check: original tree changed by fold");
12083 static void
12084 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12085 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12087 const tree_node **slot;
12088 enum tree_code code;
12089 union tree_node buf;
12090 int i, len;
12092 recursive_label:
12093 if (expr == NULL)
12094 return;
12095 slot = ht->find_slot (expr, INSERT);
12096 if (*slot != NULL)
12097 return;
12098 *slot = expr;
12099 code = TREE_CODE (expr);
12100 if (TREE_CODE_CLASS (code) == tcc_declaration
12101 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12103 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12104 memcpy ((char *) &buf, expr, tree_size (expr));
12105 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12106 buf.decl_with_vis.symtab_node = NULL;
12107 expr = (tree) &buf;
12109 else if (TREE_CODE_CLASS (code) == tcc_type
12110 && (TYPE_POINTER_TO (expr)
12111 || TYPE_REFERENCE_TO (expr)
12112 || TYPE_CACHED_VALUES_P (expr)
12113 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12114 || TYPE_NEXT_VARIANT (expr)))
12116 /* Allow these fields to be modified. */
12117 tree tmp;
12118 memcpy ((char *) &buf, expr, tree_size (expr));
12119 expr = tmp = (tree) &buf;
12120 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12121 TYPE_POINTER_TO (tmp) = NULL;
12122 TYPE_REFERENCE_TO (tmp) = NULL;
12123 TYPE_NEXT_VARIANT (tmp) = NULL;
12124 if (TYPE_CACHED_VALUES_P (tmp))
12126 TYPE_CACHED_VALUES_P (tmp) = 0;
12127 TYPE_CACHED_VALUES (tmp) = NULL;
12130 md5_process_bytes (expr, tree_size (expr), ctx);
12131 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12132 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12133 if (TREE_CODE_CLASS (code) != tcc_type
12134 && TREE_CODE_CLASS (code) != tcc_declaration
12135 && code != TREE_LIST
12136 && code != SSA_NAME
12137 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12138 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12139 switch (TREE_CODE_CLASS (code))
12141 case tcc_constant:
12142 switch (code)
12144 case STRING_CST:
12145 md5_process_bytes (TREE_STRING_POINTER (expr),
12146 TREE_STRING_LENGTH (expr), ctx);
12147 break;
12148 case COMPLEX_CST:
12149 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12150 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12151 break;
12152 case VECTOR_CST:
12153 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12154 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12155 break;
12156 default:
12157 break;
12159 break;
12160 case tcc_exceptional:
12161 switch (code)
12163 case TREE_LIST:
12164 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12165 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12166 expr = TREE_CHAIN (expr);
12167 goto recursive_label;
12168 break;
12169 case TREE_VEC:
12170 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12171 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12172 break;
12173 default:
12174 break;
12176 break;
12177 case tcc_expression:
12178 case tcc_reference:
12179 case tcc_comparison:
12180 case tcc_unary:
12181 case tcc_binary:
12182 case tcc_statement:
12183 case tcc_vl_exp:
12184 len = TREE_OPERAND_LENGTH (expr);
12185 for (i = 0; i < len; ++i)
12186 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12187 break;
12188 case tcc_declaration:
12189 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12190 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12191 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12193 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12194 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12195 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12196 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12197 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12200 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12202 if (TREE_CODE (expr) == FUNCTION_DECL)
12204 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12205 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12207 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12209 break;
12210 case tcc_type:
12211 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12212 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12213 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12214 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12215 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12216 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12217 if (INTEGRAL_TYPE_P (expr)
12218 || SCALAR_FLOAT_TYPE_P (expr))
12220 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12221 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12223 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12224 if (TREE_CODE (expr) == RECORD_TYPE
12225 || TREE_CODE (expr) == UNION_TYPE
12226 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12227 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12228 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12229 break;
12230 default:
12231 break;
12235 /* Helper function for outputting the checksum of a tree T. When
12236 debugging with gdb, you can "define mynext" to be "next" followed
12237 by "call debug_fold_checksum (op0)", then just trace down till the
12238 outputs differ. */
12240 DEBUG_FUNCTION void
12241 debug_fold_checksum (const_tree t)
12243 int i;
12244 unsigned char checksum[16];
12245 struct md5_ctx ctx;
12246 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12248 md5_init_ctx (&ctx);
12249 fold_checksum_tree (t, &ctx, &ht);
12250 md5_finish_ctx (&ctx, checksum);
12251 ht.empty ();
12253 for (i = 0; i < 16; i++)
12254 fprintf (stderr, "%d ", checksum[i]);
12256 fprintf (stderr, "\n");
12259 #endif
12261 /* Fold a unary tree expression with code CODE of type TYPE with an
12262 operand OP0. LOC is the location of the resulting expression.
12263 Return a folded expression if successful. Otherwise, return a tree
12264 expression with code CODE of type TYPE with an operand OP0. */
12266 tree
12267 fold_build1_stat_loc (location_t loc,
12268 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12270 tree tem;
12271 #ifdef ENABLE_FOLD_CHECKING
12272 unsigned char checksum_before[16], checksum_after[16];
12273 struct md5_ctx ctx;
12274 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12276 md5_init_ctx (&ctx);
12277 fold_checksum_tree (op0, &ctx, &ht);
12278 md5_finish_ctx (&ctx, checksum_before);
12279 ht.empty ();
12280 #endif
12282 tem = fold_unary_loc (loc, code, type, op0);
12283 if (!tem)
12284 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12286 #ifdef ENABLE_FOLD_CHECKING
12287 md5_init_ctx (&ctx);
12288 fold_checksum_tree (op0, &ctx, &ht);
12289 md5_finish_ctx (&ctx, checksum_after);
12291 if (memcmp (checksum_before, checksum_after, 16))
12292 fold_check_failed (op0, tem);
12293 #endif
12294 return tem;
12297 /* Fold a binary tree expression with code CODE of type TYPE with
12298 operands OP0 and OP1. LOC is the location of the resulting
12299 expression. Return a folded expression if successful. Otherwise,
12300 return a tree expression with code CODE of type TYPE with operands
12301 OP0 and OP1. */
12303 tree
12304 fold_build2_stat_loc (location_t loc,
12305 enum tree_code code, tree type, tree op0, tree op1
12306 MEM_STAT_DECL)
12308 tree tem;
12309 #ifdef ENABLE_FOLD_CHECKING
12310 unsigned char checksum_before_op0[16],
12311 checksum_before_op1[16],
12312 checksum_after_op0[16],
12313 checksum_after_op1[16];
12314 struct md5_ctx ctx;
12315 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12317 md5_init_ctx (&ctx);
12318 fold_checksum_tree (op0, &ctx, &ht);
12319 md5_finish_ctx (&ctx, checksum_before_op0);
12320 ht.empty ();
12322 md5_init_ctx (&ctx);
12323 fold_checksum_tree (op1, &ctx, &ht);
12324 md5_finish_ctx (&ctx, checksum_before_op1);
12325 ht.empty ();
12326 #endif
12328 tem = fold_binary_loc (loc, code, type, op0, op1);
12329 if (!tem)
12330 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12332 #ifdef ENABLE_FOLD_CHECKING
12333 md5_init_ctx (&ctx);
12334 fold_checksum_tree (op0, &ctx, &ht);
12335 md5_finish_ctx (&ctx, checksum_after_op0);
12336 ht.empty ();
12338 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12339 fold_check_failed (op0, tem);
12341 md5_init_ctx (&ctx);
12342 fold_checksum_tree (op1, &ctx, &ht);
12343 md5_finish_ctx (&ctx, checksum_after_op1);
12345 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12346 fold_check_failed (op1, tem);
12347 #endif
12348 return tem;
12351 /* Fold a ternary tree expression with code CODE of type TYPE with
12352 operands OP0, OP1, and OP2. Return a folded expression if
12353 successful. Otherwise, return a tree expression with code CODE of
12354 type TYPE with operands OP0, OP1, and OP2. */
12356 tree
12357 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12358 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12360 tree tem;
12361 #ifdef ENABLE_FOLD_CHECKING
12362 unsigned char checksum_before_op0[16],
12363 checksum_before_op1[16],
12364 checksum_before_op2[16],
12365 checksum_after_op0[16],
12366 checksum_after_op1[16],
12367 checksum_after_op2[16];
12368 struct md5_ctx ctx;
12369 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12371 md5_init_ctx (&ctx);
12372 fold_checksum_tree (op0, &ctx, &ht);
12373 md5_finish_ctx (&ctx, checksum_before_op0);
12374 ht.empty ();
12376 md5_init_ctx (&ctx);
12377 fold_checksum_tree (op1, &ctx, &ht);
12378 md5_finish_ctx (&ctx, checksum_before_op1);
12379 ht.empty ();
12381 md5_init_ctx (&ctx);
12382 fold_checksum_tree (op2, &ctx, &ht);
12383 md5_finish_ctx (&ctx, checksum_before_op2);
12384 ht.empty ();
12385 #endif
12387 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12388 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12389 if (!tem)
12390 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12392 #ifdef ENABLE_FOLD_CHECKING
12393 md5_init_ctx (&ctx);
12394 fold_checksum_tree (op0, &ctx, &ht);
12395 md5_finish_ctx (&ctx, checksum_after_op0);
12396 ht.empty ();
12398 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12399 fold_check_failed (op0, tem);
12401 md5_init_ctx (&ctx);
12402 fold_checksum_tree (op1, &ctx, &ht);
12403 md5_finish_ctx (&ctx, checksum_after_op1);
12404 ht.empty ();
12406 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12407 fold_check_failed (op1, tem);
12409 md5_init_ctx (&ctx);
12410 fold_checksum_tree (op2, &ctx, &ht);
12411 md5_finish_ctx (&ctx, checksum_after_op2);
12413 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12414 fold_check_failed (op2, tem);
12415 #endif
12416 return tem;
12419 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12420 arguments in ARGARRAY, and a null static chain.
12421 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12422 of type TYPE from the given operands as constructed by build_call_array. */
12424 tree
12425 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12426 int nargs, tree *argarray)
12428 tree tem;
12429 #ifdef ENABLE_FOLD_CHECKING
12430 unsigned char checksum_before_fn[16],
12431 checksum_before_arglist[16],
12432 checksum_after_fn[16],
12433 checksum_after_arglist[16];
12434 struct md5_ctx ctx;
12435 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12436 int i;
12438 md5_init_ctx (&ctx);
12439 fold_checksum_tree (fn, &ctx, &ht);
12440 md5_finish_ctx (&ctx, checksum_before_fn);
12441 ht.empty ();
12443 md5_init_ctx (&ctx);
12444 for (i = 0; i < nargs; i++)
12445 fold_checksum_tree (argarray[i], &ctx, &ht);
12446 md5_finish_ctx (&ctx, checksum_before_arglist);
12447 ht.empty ();
12448 #endif
12450 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12451 if (!tem)
12452 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12454 #ifdef ENABLE_FOLD_CHECKING
12455 md5_init_ctx (&ctx);
12456 fold_checksum_tree (fn, &ctx, &ht);
12457 md5_finish_ctx (&ctx, checksum_after_fn);
12458 ht.empty ();
12460 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12461 fold_check_failed (fn, tem);
12463 md5_init_ctx (&ctx);
12464 for (i = 0; i < nargs; i++)
12465 fold_checksum_tree (argarray[i], &ctx, &ht);
12466 md5_finish_ctx (&ctx, checksum_after_arglist);
12468 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12469 fold_check_failed (NULL_TREE, tem);
12470 #endif
12471 return tem;
12474 /* Perform constant folding and related simplification of initializer
12475 expression EXPR. These behave identically to "fold_buildN" but ignore
12476 potential run-time traps and exceptions that fold must preserve. */
12478 #define START_FOLD_INIT \
12479 int saved_signaling_nans = flag_signaling_nans;\
12480 int saved_trapping_math = flag_trapping_math;\
12481 int saved_rounding_math = flag_rounding_math;\
12482 int saved_trapv = flag_trapv;\
12483 int saved_folding_initializer = folding_initializer;\
12484 flag_signaling_nans = 0;\
12485 flag_trapping_math = 0;\
12486 flag_rounding_math = 0;\
12487 flag_trapv = 0;\
12488 folding_initializer = 1;
12490 #define END_FOLD_INIT \
12491 flag_signaling_nans = saved_signaling_nans;\
12492 flag_trapping_math = saved_trapping_math;\
12493 flag_rounding_math = saved_rounding_math;\
12494 flag_trapv = saved_trapv;\
12495 folding_initializer = saved_folding_initializer;
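/* Added illustration (a sketch, not in the original sources): the
   initializer wrappers below all follow the same save/override/restore
   pattern.  fold_build1_initializer_loc, for instance, expands to
   roughly:

     tree result;
     int saved_trapping_math = flag_trapping_math;
     ... save the other flags likewise ...
     flag_trapping_math = 0;
     folding_initializer = 1;
     result = fold_build1_loc (loc, code, type, op);
     ... restore the saved flags ...
     return result;

   so that folds which would normally be suppressed because they could
   remove a run-time trap or exception become permitted inside static
   initializers, where nothing is executed at run time.  */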
12497 tree
12498 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12499 tree type, tree op)
12501 tree result;
12502 START_FOLD_INIT;
12504 result = fold_build1_loc (loc, code, type, op);
12506 END_FOLD_INIT;
12507 return result;
12510 tree
12511 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12512 tree type, tree op0, tree op1)
12514 tree result;
12515 START_FOLD_INIT;
12517 result = fold_build2_loc (loc, code, type, op0, op1);
12519 END_FOLD_INIT;
12520 return result;
12523 tree
12524 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12525 int nargs, tree *argarray)
12527 tree result;
12528 START_FOLD_INIT;
12530 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12532 END_FOLD_INIT;
12533 return result;
12536 #undef START_FOLD_INIT
12537 #undef END_FOLD_INIT
12539 /* Determine if first argument is a multiple of second argument. Return 0 if
12540 it is not, or if we cannot easily determine it to be.
12542 An example of the sort of thing we care about (at this point; this routine
12543 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12544 fold cases do now) is discovering that
12546 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12548 is a multiple of
12550 SAVE_EXPR (J * 8)
12552 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12554 This code also handles discovering that
12556 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12558 is a multiple of 8 so we don't have to worry about dealing with a
12559 possible remainder.
12561 Note that we *look* inside a SAVE_EXPR only to determine how it was
12562 calculated; it is not safe for fold to do much of anything else with the
12563 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12564 at run time. For example, the latter example above *cannot* be implemented
12565 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12566 evaluation time of the original SAVE_EXPR is not necessarily the same at
12567 the time the new expression is evaluated. The only optimization of this
12568 sort that would be valid is changing
12570 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12572 divided by 8 to
12574 SAVE_EXPR (I) * SAVE_EXPR (J)
12576 (where the same SAVE_EXPR (J) is used in the original and the
12577 transformed version). */
12579 int
12580 multiple_of_p (tree type, const_tree top, const_tree bottom)
12582 if (operand_equal_p (top, bottom, 0))
12583 return 1;
12585 if (TREE_CODE (type) != INTEGER_TYPE)
12586 return 0;
12588 switch (TREE_CODE (top))
12590 case BIT_AND_EXPR:
12591 /* Bitwise and provides a power of two multiple. If the mask is
12592 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12593 if (!integer_pow2p (bottom))
12594 return 0;
12595 /* FALLTHRU */
12597 case MULT_EXPR:
12598 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12599 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12601 case PLUS_EXPR:
12602 case MINUS_EXPR:
12603 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12604 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12606 case LSHIFT_EXPR:
12607 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12609 tree op1, t1;
12611 op1 = TREE_OPERAND (top, 1);
12612 /* const_binop may not detect overflow correctly,
12613 so check for it explicitly here. */
12614 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12615 && 0 != (t1 = fold_convert (type,
12616 const_binop (LSHIFT_EXPR,
12617 size_one_node,
12618 op1)))
12619 && !TREE_OVERFLOW (t1))
12620 return multiple_of_p (type, t1, bottom);
12622 return 0;
12624 case NOP_EXPR:
12625 /* Can't handle conversions from non-integral or wider integral type. */
12626 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12627 || (TYPE_PRECISION (type)
12628 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12629 return 0;
12631 /* ... fall through ... */
12633 case SAVE_EXPR:
12634 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12636 case COND_EXPR:
12637 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12638 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12640 case INTEGER_CST:
12641 if (TREE_CODE (bottom) != INTEGER_CST
12642 || integer_zerop (bottom)
12643 || (TYPE_UNSIGNED (type)
12644 && (tree_int_cst_sgn (top) < 0
12645 || tree_int_cst_sgn (bottom) < 0)))
12646 return 0;
12647 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12648 SIGNED);
12650 default:
12651 return 0;
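/* Added example (a sketch, not in the original sources): for constant
   operands the question reduces to exact division, e.g.

     tree t24 = build_int_cst (sizetype, 24);
     tree t8 = build_int_cst (sizetype, 8);
     multiple_of_p (sizetype, t24, t8);   returns 1

   round_up_loc and round_down_loc below use this predicate to skip the
   rounding arithmetic when VALUE is already a multiple of DIVISOR.  */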
12655 #define tree_expr_nonnegative_warnv_p(X, Y) \
12656 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12658 #define RECURSE(X) \
12659 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
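/* Added note (a sketch, not in the original sources): the #define above
   deliberately poisons plain calls to tree_expr_nonnegative_warnv_p in
   the rest of this section.  RECURSE bypasses the poisoning by calling
   the function through its parenthesized name,

     (tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1)

   which suppresses function-like macro expansion, and it increments
   DEPTH so the recursion stays bounded.  */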
12661 /* Return true if CODE or TYPE is known to be non-negative. */
12663 static bool
12664 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12666 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12667 && truth_value_p (code))
12668 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12669 have a signed:1 type (where the values are -1 and 0). */
12670 return true;
12671 return false;
12674 /* Return true if (CODE OP0) is known to be non-negative. If the return
12675 value is based on the assumption that signed overflow is undefined,
12676 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12677 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12679 bool
12680 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12681 bool *strict_overflow_p, int depth)
12683 if (TYPE_UNSIGNED (type))
12684 return true;
12686 switch (code)
12688 case ABS_EXPR:
12689 /* We can't return 1 if flag_wrapv is set because
12690 ABS_EXPR<INT_MIN> = INT_MIN. */
12691 if (!ANY_INTEGRAL_TYPE_P (type))
12692 return true;
12693 if (TYPE_OVERFLOW_UNDEFINED (type))
12695 *strict_overflow_p = true;
12696 return true;
12698 break;
12700 case NON_LVALUE_EXPR:
12701 case FLOAT_EXPR:
12702 case FIX_TRUNC_EXPR:
12703 return RECURSE (op0);
12705 CASE_CONVERT:
12707 tree inner_type = TREE_TYPE (op0);
12708 tree outer_type = type;
12710 if (TREE_CODE (outer_type) == REAL_TYPE)
12712 if (TREE_CODE (inner_type) == REAL_TYPE)
12713 return RECURSE (op0);
12714 if (INTEGRAL_TYPE_P (inner_type))
12716 if (TYPE_UNSIGNED (inner_type))
12717 return true;
12718 return RECURSE (op0);
12721 else if (INTEGRAL_TYPE_P (outer_type))
12723 if (TREE_CODE (inner_type) == REAL_TYPE)
12724 return RECURSE (op0);
12725 if (INTEGRAL_TYPE_P (inner_type))
12726 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12727 && TYPE_UNSIGNED (inner_type);
12730 break;
12732 default:
12733 return tree_simple_nonnegative_warnv_p (code, type);
12736 /* We don't know the sign of the expression, so be conservative and return false. */
12737 return false;
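/* Added example (a sketch, not in the original sources): widening an
   unsigned value to a wider signed type can never yield a negative
   result.  For

     unsigned char c;
     int i = c;   i.e. (int) c, a NOP_EXPR in GENERIC

   the CASE_CONVERT arm above returns true, because the inner type is
   unsigned and strictly narrower than the outer type.  */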
12740 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12741 value is based on the assumption that signed overflow is undefined,
12742 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12743 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12745 bool
12746 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12747 tree op1, bool *strict_overflow_p,
12748 int depth)
12750 if (TYPE_UNSIGNED (type))
12751 return true;
12753 switch (code)
12755 case POINTER_PLUS_EXPR:
12756 case PLUS_EXPR:
12757 if (FLOAT_TYPE_P (type))
12758 return RECURSE (op0) && RECURSE (op1);
12760 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12761 both unsigned and at least 2 bits shorter than the result. */
12762 if (TREE_CODE (type) == INTEGER_TYPE
12763 && TREE_CODE (op0) == NOP_EXPR
12764 && TREE_CODE (op1) == NOP_EXPR)
12766 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12767 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12768 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12769 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12771 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12772 TYPE_PRECISION (inner2)) + 1;
12773 return prec < TYPE_PRECISION (type);
12776 break;
12778 case MULT_EXPR:
12779 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12781 /* x * x is always non-negative for floating point x, and for
12782 integers when signed overflow is undefined. */
12783 if (operand_equal_p (op0, op1, 0)
12784 || (RECURSE (op0) && RECURSE (op1)))
12786 if (ANY_INTEGRAL_TYPE_P (type)
12787 && TYPE_OVERFLOW_UNDEFINED (type))
12788 *strict_overflow_p = true;
12789 return true;
12793 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12794 both unsigned and the sum of their precisions is less than that of the result. */
12795 if (TREE_CODE (type) == INTEGER_TYPE
12796 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12797 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12799 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12800 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12801 : TREE_TYPE (op0);
12802 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12803 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12804 : TREE_TYPE (op1);
12806 bool unsigned0 = TYPE_UNSIGNED (inner0);
12807 bool unsigned1 = TYPE_UNSIGNED (inner1);
12809 if (TREE_CODE (op0) == INTEGER_CST)
12810 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12812 if (TREE_CODE (op1) == INTEGER_CST)
12813 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12815 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12816 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12818 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12819 ? tree_int_cst_min_precision (op0, UNSIGNED)
12820 : TYPE_PRECISION (inner0);
12822 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12823 ? tree_int_cst_min_precision (op1, UNSIGNED)
12824 : TYPE_PRECISION (inner1);
12826 return precision0 + precision1 < TYPE_PRECISION (type);
12829 return false;
12831 case BIT_AND_EXPR:
12832 case MAX_EXPR:
12833 return RECURSE (op0) || RECURSE (op1);
12835 case BIT_IOR_EXPR:
12836 case BIT_XOR_EXPR:
12837 case MIN_EXPR:
12838 case RDIV_EXPR:
12839 case TRUNC_DIV_EXPR:
12840 case CEIL_DIV_EXPR:
12841 case FLOOR_DIV_EXPR:
12842 case ROUND_DIV_EXPR:
12843 return RECURSE (op0) && RECURSE (op1);
12845 case TRUNC_MOD_EXPR:
12846 return RECURSE (op0);
12848 case FLOOR_MOD_EXPR:
12849 return RECURSE (op1);
12851 case CEIL_MOD_EXPR:
12852 case ROUND_MOD_EXPR:
12853 default:
12854 return tree_simple_nonnegative_warnv_p (code, type);
12857 /* We don't know the sign of the expression, so be conservative and return false. */
12858 return false;
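/* Added example (a sketch, not in the original sources): for the
   PLUS_EXPR zero-extension rule above, given

     unsigned char a, b;
     int sum = (int) a + (int) b;

   the larger inner precision is 8, so prec = 8 + 1 = 9 < 32 and the sum
   is known non-negative: two zero-extended 8-bit values total at most
   510, which cannot wrap a 32-bit signed int.  */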
12861 /* Return true if T is known to be non-negative. If the return
12862 value is based on the assumption that signed overflow is undefined,
12863 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12864 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12866 bool
12867 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12869 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12870 return true;
12872 switch (TREE_CODE (t))
12874 case INTEGER_CST:
12875 return tree_int_cst_sgn (t) >= 0;
12877 case REAL_CST:
12878 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12880 case FIXED_CST:
12881 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12883 case COND_EXPR:
12884 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12886 case SSA_NAME:
12887 /* Limit the depth of recursion to avoid quadratic behavior.
12888 This is expected to catch almost all occurrences in practice.
12889 If this code misses important cases that unbounded recursion
12890 would not, passes that need this information could be revised
12891 to provide it through dataflow propagation. */
12892 return (!name_registered_for_update_p (t)
12893 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12894 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12895 strict_overflow_p, depth));
12897 default:
12898 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12902 /* Return true if T is known to be non-negative. If the return
12903 value is based on the assumption that signed overflow is undefined,
12904 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12905 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12907 bool
12908 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12909 bool *strict_overflow_p, int depth)
12911 switch (fn)
12913 CASE_CFN_ACOS:
12914 CASE_CFN_ACOSH:
12915 CASE_CFN_CABS:
12916 CASE_CFN_COSH:
12917 CASE_CFN_ERFC:
12918 CASE_CFN_EXP:
12919 CASE_CFN_EXP10:
12920 CASE_CFN_EXP2:
12921 CASE_CFN_FABS:
12922 CASE_CFN_FDIM:
12923 CASE_CFN_HYPOT:
12924 CASE_CFN_POW10:
12925 CASE_CFN_FFS:
12926 CASE_CFN_PARITY:
12927 CASE_CFN_POPCOUNT:
12928 CASE_CFN_CLZ:
12929 CASE_CFN_CLRSB:
12930 case CFN_BUILT_IN_BSWAP32:
12931 case CFN_BUILT_IN_BSWAP64:
12932 /* Always true. */
12933 return true;
12935 CASE_CFN_SQRT:
12936 /* sqrt(-0.0) is -0.0. */
12937 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12938 return true;
12939 return RECURSE (arg0);
12941 CASE_CFN_ASINH:
12942 CASE_CFN_ATAN:
12943 CASE_CFN_ATANH:
12944 CASE_CFN_CBRT:
12945 CASE_CFN_CEIL:
12946 CASE_CFN_ERF:
12947 CASE_CFN_EXPM1:
12948 CASE_CFN_FLOOR:
12949 CASE_CFN_FMOD:
12950 CASE_CFN_FREXP:
12951 CASE_CFN_ICEIL:
12952 CASE_CFN_IFLOOR:
12953 CASE_CFN_IRINT:
12954 CASE_CFN_IROUND:
12955 CASE_CFN_LCEIL:
12956 CASE_CFN_LDEXP:
12957 CASE_CFN_LFLOOR:
12958 CASE_CFN_LLCEIL:
12959 CASE_CFN_LLFLOOR:
12960 CASE_CFN_LLRINT:
12961 CASE_CFN_LLROUND:
12962 CASE_CFN_LRINT:
12963 CASE_CFN_LROUND:
12964 CASE_CFN_MODF:
12965 CASE_CFN_NEARBYINT:
12966 CASE_CFN_RINT:
12967 CASE_CFN_ROUND:
12968 CASE_CFN_SCALB:
12969 CASE_CFN_SCALBLN:
12970 CASE_CFN_SCALBN:
12971 CASE_CFN_SIGNBIT:
12972 CASE_CFN_SIGNIFICAND:
12973 CASE_CFN_SINH:
12974 CASE_CFN_TANH:
12975 CASE_CFN_TRUNC:
12976 /* True if the 1st argument is nonnegative. */
12977 return RECURSE (arg0);
12979 CASE_CFN_FMAX:
12980 /* True if the 1st OR 2nd arguments are nonnegative. */
12981 return RECURSE (arg0) || RECURSE (arg1);
12983 CASE_CFN_FMIN:
12984 /* True if the 1st AND 2nd arguments are nonnegative. */
12985 return RECURSE (arg0) && RECURSE (arg1);
12987 CASE_CFN_COPYSIGN:
12988 /* True if the 2nd argument is nonnegative. */
12989 return RECURSE (arg1);
12991 CASE_CFN_POWI:
12992 /* True if the 1st argument is nonnegative or the second
12993 argument is an even integer. */
12994 if (TREE_CODE (arg1) == INTEGER_CST
12995 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
12996 return true;
12997 return RECURSE (arg0);
12999 CASE_CFN_POW:
13000 /* True if the 1st argument is nonnegative or the second
13001 argument is an even integer valued real. */
13002 if (TREE_CODE (arg1) == REAL_CST)
13004 REAL_VALUE_TYPE c;
13005 HOST_WIDE_INT n;
13007 c = TREE_REAL_CST (arg1);
13008 n = real_to_integer (&c);
13009 if ((n & 1) == 0)
13011 REAL_VALUE_TYPE cint;
13012 real_from_integer (&cint, VOIDmode, n, SIGNED);
13013 if (real_identical (&c, &cint))
13014 return true;
13017 return RECURSE (arg0);
13019 default:
13020 break;
13022 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
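/* Added example (a sketch, not in the original sources): the
   CASE_CFN_POW arm proves that pow (x, 2.0) is non-negative for any x,
   since n = real_to_integer (&c) == 2 is even and converting 2 back to
   real compares identical to 2.0.  By contrast, for pow (x, 2.5) the
   round trip fails the real_identical test, so the result is known
   non-negative only if x provably is.  */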
13025 /* Return true if T is known to be non-negative. If the return
13026 value is based on the assumption that signed overflow is undefined,
13027 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13028 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13030 static bool
13031 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13033 enum tree_code code = TREE_CODE (t);
13034 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13035 return true;
13037 switch (code)
13039 case TARGET_EXPR:
13041 tree temp = TARGET_EXPR_SLOT (t);
13042 t = TARGET_EXPR_INITIAL (t);
13044 /* If the initializer is non-void, then it's a normal expression
13045 that will be assigned to the slot. */
13046 if (!VOID_TYPE_P (t))
13047 return RECURSE (t);
13049 /* Otherwise, the initializer sets the slot in some way. One common
13050 way is an assignment statement at the end of the initializer. */
13051 while (1)
13053 if (TREE_CODE (t) == BIND_EXPR)
13054 t = expr_last (BIND_EXPR_BODY (t));
13055 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13056 || TREE_CODE (t) == TRY_CATCH_EXPR)
13057 t = expr_last (TREE_OPERAND (t, 0));
13058 else if (TREE_CODE (t) == STATEMENT_LIST)
13059 t = expr_last (t);
13060 else
13061 break;
13063 if (TREE_CODE (t) == MODIFY_EXPR
13064 && TREE_OPERAND (t, 0) == temp)
13065 return RECURSE (TREE_OPERAND (t, 1));
13067 return false;
13070 case CALL_EXPR:
13072 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13073 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13075 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13076 get_call_combined_fn (t),
13077 arg0,
13078 arg1,
13079 strict_overflow_p, depth);
13081 case COMPOUND_EXPR:
13082 case MODIFY_EXPR:
13083 return RECURSE (TREE_OPERAND (t, 1));
13085 case BIND_EXPR:
13086 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13088 case SAVE_EXPR:
13089 return RECURSE (TREE_OPERAND (t, 0));
13091 default:
13092 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13096 #undef RECURSE
13097 #undef tree_expr_nonnegative_warnv_p
13099 /* Return true if T is known to be non-negative. If the return
13100 value is based on the assumption that signed overflow is undefined,
13101 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13102 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13104 bool
13105 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13107 enum tree_code code;
13108 if (t == error_mark_node)
13109 return false;
13111 code = TREE_CODE (t);
13112 switch (TREE_CODE_CLASS (code))
13114 case tcc_binary:
13115 case tcc_comparison:
13116 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13117 TREE_TYPE (t),
13118 TREE_OPERAND (t, 0),
13119 TREE_OPERAND (t, 1),
13120 strict_overflow_p, depth);
13122 case tcc_unary:
13123 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13124 TREE_TYPE (t),
13125 TREE_OPERAND (t, 0),
13126 strict_overflow_p, depth);
13128 case tcc_constant:
13129 case tcc_declaration:
13130 case tcc_reference:
13131 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13133 default:
13134 break;
13137 switch (code)
13139 case TRUTH_AND_EXPR:
13140 case TRUTH_OR_EXPR:
13141 case TRUTH_XOR_EXPR:
13142 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13143 TREE_TYPE (t),
13144 TREE_OPERAND (t, 0),
13145 TREE_OPERAND (t, 1),
13146 strict_overflow_p, depth);
13147 case TRUTH_NOT_EXPR:
13148 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13149 TREE_TYPE (t),
13150 TREE_OPERAND (t, 0),
13151 strict_overflow_p, depth);
13153 case COND_EXPR:
13154 case CONSTRUCTOR:
13155 case OBJ_TYPE_REF:
13156 case ASSERT_EXPR:
13157 case ADDR_EXPR:
13158 case WITH_SIZE_EXPR:
13159 case SSA_NAME:
13160 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13162 default:
13163 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13167 /* Return true if `t' is known to be non-negative. Handle warnings
13168 about undefined signed overflow. */
13170 bool
13171 tree_expr_nonnegative_p (tree t)
13173 bool ret, strict_overflow_p;
13175 strict_overflow_p = false;
13176 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13177 if (strict_overflow_p)
13178 fold_overflow_warning (("assuming signed overflow does not occur when "
13179 "determining that expression is always "
13180 "non-negative"),
13181 WARN_STRICT_OVERFLOW_MISC);
13182 return ret;
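/* Added usage sketch (hypothetical caller, not in the original
   sources): a simplification valid only for non-negative operands
   would be guarded like

     if (tree_expr_nonnegative_p (arg0))
       return fold_convert_loc (loc, type, arg0);   e.g. abs (x) -> x

   and any -Wstrict-overflow diagnostic implied by the answer is handled
   by the fold_overflow_warning call above.  */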
13186 /* Return true when (CODE OP0) is known to be nonzero.
13187 For floating point we further ensure that T is not denormal.
13188 Similar logic is present in nonzero_address in rtlanal.c.
13190 If the return value is based on the assumption that signed overflow
13191 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13192 change *STRICT_OVERFLOW_P. */
13194 bool
13195 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13196 bool *strict_overflow_p)
13198 switch (code)
13200 case ABS_EXPR:
13201 return tree_expr_nonzero_warnv_p (op0,
13202 strict_overflow_p);
13204 case NOP_EXPR:
13206 tree inner_type = TREE_TYPE (op0);
13207 tree outer_type = type;
13209 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13210 && tree_expr_nonzero_warnv_p (op0,
13211 strict_overflow_p));
13213 break;
13215 case NON_LVALUE_EXPR:
13216 return tree_expr_nonzero_warnv_p (op0,
13217 strict_overflow_p);
13219 default:
13220 break;
13223 return false;
13226 /* Return true when (CODE OP0 OP1) is known to be nonzero.
13227 For floating point we further ensure that T is not denormal.
13228 Similar logic is present in nonzero_address in rtlanal.c.
13230 If the return value is based on the assumption that signed overflow
13231 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13232 change *STRICT_OVERFLOW_P. */
13234 bool
13235 tree_binary_nonzero_warnv_p (enum tree_code code,
13236 tree type,
13237 tree op0,
13238 tree op1, bool *strict_overflow_p)
13240 bool sub_strict_overflow_p;
13241 switch (code)
13243 case POINTER_PLUS_EXPR:
13244 case PLUS_EXPR:
13245 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13247 /* In the presence of negative values it is hard
13248 to say anything definite. */
13249 sub_strict_overflow_p = false;
13250 if (!tree_expr_nonnegative_warnv_p (op0,
13251 &sub_strict_overflow_p)
13252 || !tree_expr_nonnegative_warnv_p (op1,
13253 &sub_strict_overflow_p))
13254 return false;
13255 /* One of the operands must be positive and the other non-negative. */
13256 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13257 overflows, on a twos-complement machine the sum of two
13258 nonnegative numbers can never be zero. */
13259 return (tree_expr_nonzero_warnv_p (op0,
13260 strict_overflow_p)
13261 || tree_expr_nonzero_warnv_p (op1,
13262 strict_overflow_p));
13264 break;
13266 case MULT_EXPR:
13267 if (TYPE_OVERFLOW_UNDEFINED (type))
13269 if (tree_expr_nonzero_warnv_p (op0,
13270 strict_overflow_p)
13271 && tree_expr_nonzero_warnv_p (op1,
13272 strict_overflow_p))
13274 *strict_overflow_p = true;
13275 return true;
13278 break;
13280 case MIN_EXPR:
13281 sub_strict_overflow_p = false;
13282 if (tree_expr_nonzero_warnv_p (op0,
13283 &sub_strict_overflow_p)
13284 && tree_expr_nonzero_warnv_p (op1,
13285 &sub_strict_overflow_p))
13287 if (sub_strict_overflow_p)
13288 *strict_overflow_p = true;
13290 break;
13292 case MAX_EXPR:
13293 sub_strict_overflow_p = false;
13294 if (tree_expr_nonzero_warnv_p (op0,
13295 &sub_strict_overflow_p))
13297 if (sub_strict_overflow_p)
13298 *strict_overflow_p = true;
13300 /* When both operands are nonzero, then MAX must be too. */
13301 if (tree_expr_nonzero_warnv_p (op1,
13302 strict_overflow_p))
13303 return true;
13305 /* MAX where operand 0 is positive is positive. */
13306 return tree_expr_nonnegative_warnv_p (op0,
13307 strict_overflow_p);
13309 /* MAX where operand 1 is positive is positive. */
13310 else if (tree_expr_nonzero_warnv_p (op1,
13311 &sub_strict_overflow_p)
13312 && tree_expr_nonnegative_warnv_p (op1,
13313 &sub_strict_overflow_p))
13315 if (sub_strict_overflow_p)
13316 *strict_overflow_p = true;
13317 return true;
13319 break;
13321 case BIT_IOR_EXPR:
13322 return (tree_expr_nonzero_warnv_p (op1,
13323 strict_overflow_p)
13324 || tree_expr_nonzero_warnv_p (op0,
13325 strict_overflow_p));
13327 default:
13328 break;
13331 return false;
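/* Added example (a sketch, not in the original sources): in the
   PLUS_EXPR arm above, for an integral type with undefined signed
   overflow, if p_1 and q_2 are both known non-negative and at least one
   of them is known non-zero, then p_1 + q_2 is non-zero: as the comment
   there notes, even on wrap-around the sum of two such values can never
   come back to zero.  */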
13334 /* Return true when T is known to be nonzero (e.g. a nonzero address).
13335 For floating point we further ensure that T is not denormal.
13336 Similar logic is present in nonzero_address in rtlanal.c.
13338 If the return value is based on the assumption that signed overflow
13339 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13340 change *STRICT_OVERFLOW_P. */
13342 bool
13343 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13345 bool sub_strict_overflow_p;
13346 switch (TREE_CODE (t))
13348 case INTEGER_CST:
13349 return !integer_zerop (t);
13351 case ADDR_EXPR:
13353 tree base = TREE_OPERAND (t, 0);
13355 if (!DECL_P (base))
13356 base = get_base_address (base);
13358 if (!base)
13359 return false;
13361 /* For objects in the symbol table, check if we know they are non-zero.
13362 Don't do anything for variables and functions before the symtab is
13363 built; it is quite possible that they will be declared weak later. */
13364 if (DECL_P (base) && decl_in_symtab_p (base))
13366 struct symtab_node *symbol;
13368 symbol = symtab_node::get_create (base);
13369 if (symbol)
13370 return symbol->nonzero_address ();
13371 else
13372 return false;
13375 /* Function local objects are never NULL. */
13376 if (DECL_P (base)
13377 && (DECL_CONTEXT (base)
13378 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13379 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13380 return true;
13382 /* Constants are never weak. */
13383 if (CONSTANT_CLASS_P (base))
13384 return true;
13386 return false;
13389 case COND_EXPR:
13390 sub_strict_overflow_p = false;
13391 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13392 &sub_strict_overflow_p)
13393 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13394 &sub_strict_overflow_p))
13396 if (sub_strict_overflow_p)
13397 *strict_overflow_p = true;
13398 return true;
13400 break;
13402 default:
13403 break;
13405 return false;
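/* Added example (a sketch, not in the original sources): in

     int f (void) { int local; return &local != 0; }

   the ADDR_EXPR arm proves &local is non-zero, because function-local
   objects are never NULL, so the comparison folds to 1.  The address of
   a global is instead referred to the symbol table, which knows whether
   the symbol might be weak and therefore possibly NULL.  */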
13408 #define integer_valued_real_p(X) \
13409 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13411 #define RECURSE(X) \
13412 ((integer_valued_real_p) (X, depth + 1))
13414 /* Return true if the floating point result of (CODE OP0) has an
13415 integer value. We also allow +Inf, -Inf and NaN to be considered
13416 integer values.
13418 DEPTH is the current nesting depth of the query. */
13420 bool
13421 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13423 switch (code)
13425 case FLOAT_EXPR:
13426 return true;
13428 case ABS_EXPR:
13429 return RECURSE (op0);
13431 CASE_CONVERT:
13433 tree type = TREE_TYPE (op0);
13434 if (TREE_CODE (type) == INTEGER_TYPE)
13435 return true;
13436 if (TREE_CODE (type) == REAL_TYPE)
13437 return RECURSE (op0);
13438 break;
13441 default:
13442 break;
13444 return false;
13447 /* Return true if the floating point result of (CODE OP0 OP1) has an
13448 integer value. We also allow +Inf, -Inf and NaN to be considered
13449 integer values.
13451 DEPTH is the current nesting depth of the query. */
13453 bool
13454 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13456 switch (code)
13458 case PLUS_EXPR:
13459 case MINUS_EXPR:
13460 case MULT_EXPR:
13461 case MIN_EXPR:
13462 case MAX_EXPR:
13463 return RECURSE (op0) && RECURSE (op1);
13465 default:
13466 break;
13468 return false;
13471 /* Return true if the floating point result of calling FN with arguments
13472 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13473 considered integer values. If FN takes fewer than 2 arguments,
13474 the remaining ARGn are null.
13476 DEPTH is the current nesting depth of the query. */
13478 bool
13479 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13481 switch (fn)
13483 CASE_CFN_CEIL:
13484 CASE_CFN_FLOOR:
13485 CASE_CFN_NEARBYINT:
13486 CASE_CFN_RINT:
13487 CASE_CFN_ROUND:
13488 CASE_CFN_TRUNC:
13489 return true;
13491 CASE_CFN_FMIN:
13492 CASE_CFN_FMAX:
13493 return RECURSE (arg0) && RECURSE (arg1);
13495 default:
13496 break;
13498 return false;
13501 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13502 has an integer value. We also allow +Inf, -Inf and NaN to be
13503 considered integer values.
13505 DEPTH is the current nesting depth of the query. */
13507 bool
13508 integer_valued_real_single_p (tree t, int depth)
13510 switch (TREE_CODE (t))
13512 case REAL_CST:
13513 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13515 case COND_EXPR:
13516 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13518 case SSA_NAME:
13519 /* Limit the depth of recursion to avoid quadratic behavior.
13520 This is expected to catch almost all occurrences in practice.
13521 If this code misses important cases that unbounded recursion
13522 would not, passes that need this information could be revised
13523 to provide it through dataflow propagation. */
13524 return (!name_registered_for_update_p (t)
13525 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13526 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13527 depth));
13529 default:
13530 break;
13532 return false;
13535 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13536 has an integer value. We also allow +Inf, -Inf and NaN to be
13537 considered integer values.
13539 DEPTH is the current nesting depth of the query. */
13541 static bool
13542 integer_valued_real_invalid_p (tree t, int depth)
13544 switch (TREE_CODE (t))
13546 case COMPOUND_EXPR:
13547 case MODIFY_EXPR:
13548 case BIND_EXPR:
13549 return RECURSE (TREE_OPERAND (t, 1));
13551 case SAVE_EXPR:
13552 return RECURSE (TREE_OPERAND (t, 0));
13554 default:
13555 break;
13557 return false;
13560 #undef RECURSE
13561 #undef integer_valued_real_p
13563 /* Return true if the floating point expression T has an integer value.
13564 We also allow +Inf, -Inf and NaN to be considered integer values.
13566 DEPTH is the current nesting depth of the query. */
13568 bool
13569 integer_valued_real_p (tree t, int depth)
13571 if (t == error_mark_node)
13572 return false;
13574 tree_code code = TREE_CODE (t);
13575 switch (TREE_CODE_CLASS (code))
13577 case tcc_binary:
13578 case tcc_comparison:
13579 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13580 TREE_OPERAND (t, 1), depth);
13582 case tcc_unary:
13583 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13585 case tcc_constant:
13586 case tcc_declaration:
13587 case tcc_reference:
13588 return integer_valued_real_single_p (t, depth);
13590 default:
13591 break;
13594 switch (code)
13596 case COND_EXPR:
13597 case SSA_NAME:
13598 return integer_valued_real_single_p (t, depth);
13600 case CALL_EXPR:
13602 tree arg0 = (call_expr_nargs (t) > 0
13603 ? CALL_EXPR_ARG (t, 0)
13604 : NULL_TREE);
13605 tree arg1 = (call_expr_nargs (t) > 1
13606 ? CALL_EXPR_ARG (t, 1)
13607 : NULL_TREE);
13608 return integer_valued_real_call_p (get_call_combined_fn (t),
13609 arg0, arg1, depth);
13612 default:
13613 return integer_valued_real_invalid_p (t, depth);
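/* Added usage sketch (hypothetical transform, not in the original
   sources): a caller can use this predicate to drop redundant rounding,
   e.g. when folding trunc (arg0):

     if (integer_valued_real_p (arg0, 0))
       return arg0;

   which is how an expression such as trunc (floor (x)) can lose its
   outer call, floor being one of the always-integral cases above.  */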
13617 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13618 attempt to fold the expression to a constant without modifying TYPE,
13619 OP0 or OP1.
13621 If the expression could be simplified to a constant, then return
13622 the constant. If the expression would not be simplified to a
13623 constant, then return NULL_TREE. */
13625 tree
13626 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13628 tree tem = fold_binary (code, type, op0, op1);
13629 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13632 /* Given the components of a unary expression CODE, TYPE and OP0,
13633 attempt to fold the expression to a constant without modifying
13634 TYPE or OP0.
13636 If the expression could be simplified to a constant, then return
13637 the constant. If the expression would not be simplified to a
13638 constant, then return NULL_TREE. */
13640 tree
13641 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13643 tree tem = fold_unary (code, type, op0);
13644 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
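/* Added example (a sketch, not in the original sources): the two
   helpers above only return a tree when folding reached a constant:

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                          two, three);   an INTEGER_CST 5

   whereas folding 2 + x would yield a non-constant tree and therefore
   NULL_TREE.  */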
13647 /* If EXP represents referencing an element in a constant string
13648 (either via pointer arithmetic or array indexing), return the
13649 tree representing the value accessed, otherwise return NULL. */
13651 tree
13652 fold_read_from_constant_string (tree exp)
13654 if ((TREE_CODE (exp) == INDIRECT_REF
13655 || TREE_CODE (exp) == ARRAY_REF)
13656 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13658 tree exp1 = TREE_OPERAND (exp, 0);
13659 tree index;
13660 tree string;
13661 location_t loc = EXPR_LOCATION (exp);
13663 if (TREE_CODE (exp) == INDIRECT_REF)
13664 string = string_constant (exp1, &index);
13665 else
13667 tree low_bound = array_ref_low_bound (exp);
13668 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13670 /* Optimize the special case of a zero lower bound.
13672 We convert the low_bound to sizetype to avoid some problems
13673 with constant folding. (E.g. suppose the lower bound is 1,
13674 and its mode is QI. Without the conversion, (ARRAY
13675 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13676 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13677 if (! integer_zerop (low_bound))
13678 index = size_diffop_loc (loc, index,
13679 fold_convert_loc (loc, sizetype, low_bound));
13681 string = exp1;
13684 if (string
13685 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13686 && TREE_CODE (string) == STRING_CST
13687 && TREE_CODE (index) == INTEGER_CST
13688 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13689 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13690 == MODE_INT)
13691 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13692 return build_int_cst_type (TREE_TYPE (exp),
13693 (TREE_STRING_POINTER (string)
13694 [TREE_INT_CST_LOW (index)]));
13696 return NULL;
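/* Added example (a sketch, not in the original sources): for the
   GENERIC tree corresponding to "abc"[1] (an ARRAY_REF of a STRING_CST
   with constant index 1), the code above returns
   build_int_cst_type (TREE_TYPE (exp), 'b'), provided the element type
   has a single-byte integer mode and the index is within the string.  */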
13699 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13700 an integer constant, real, or fixed-point constant.
13702 TYPE is the type of the result. */
13704 static tree
13705 fold_negate_const (tree arg0, tree type)
13707 tree t = NULL_TREE;
13709 switch (TREE_CODE (arg0))
13711 case INTEGER_CST:
13713 bool overflow;
13714 wide_int val = wi::neg (arg0, &overflow);
13715 t = force_fit_type (type, val, 1,
13716 (overflow | TREE_OVERFLOW (arg0))
13717 && !TYPE_UNSIGNED (type));
13718 break;
13721 case REAL_CST:
13722 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13723 break;
13725 case FIXED_CST:
13727 FIXED_VALUE_TYPE f;
13728 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13729 &(TREE_FIXED_CST (arg0)), NULL,
13730 TYPE_SATURATING (type));
13731 t = build_fixed (type, f);
13732 /* Propagate overflow flags. */
13733 if (overflow_p | TREE_OVERFLOW (arg0))
13734 TREE_OVERFLOW (t) = 1;
13735 break;
13738 default:
13739 gcc_unreachable ();
13742 return t;
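/* Added example (a sketch, not in the original sources):

     tree m = build_int_cst (integer_type_node, INT_MIN);
     tree n = fold_negate_const (m, integer_type_node);

   wraps back to INT_MIN in 32-bit two's complement; wi::neg reports the
   overflow and force_fit_type marks the result with TREE_OVERFLOW so
   later passes can see the value was not exactly representable.  */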
13745 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13746 an integer constant or real constant.
13748 TYPE is the type of the result. */
13750 tree
13751 fold_abs_const (tree arg0, tree type)
13753 tree t = NULL_TREE;
13755 switch (TREE_CODE (arg0))
13757 case INTEGER_CST:
13759 /* If the value is unsigned or non-negative, then the absolute value
13760 is the same as the ordinary value. */
13761 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13762 t = arg0;
13764 /* If the value is negative, then the absolute value is
13765 its negation. */
13766 else
13768 bool overflow;
13769 wide_int val = wi::neg (arg0, &overflow);
13770 t = force_fit_type (type, val, -1,
13771 overflow | TREE_OVERFLOW (arg0));
13774 break;
13776 case REAL_CST:
13777 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13778 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13779 else
13780 t = arg0;
13781 break;
13783 default:
13784 gcc_unreachable ();
13787 return t;
13790 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13791 constant. TYPE is the type of the result. */
13793 static tree
13794 fold_not_const (const_tree arg0, tree type)
13796 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13798 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13801 /* Given CODE, a relational operator, the target type, TYPE and two
13802 constant operands OP0 and OP1, return the result of the
13803 relational operation. If the result is not a compile time
13804 constant, then return NULL_TREE. */
13806 static tree
13807 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13809 int result, invert;
13811 /* From here on, the only cases we handle are when the result is
13812 known to be a constant. */
13814 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13816 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13817 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13819 /* Handle the cases where either operand is a NaN. */
13820 if (real_isnan (c0) || real_isnan (c1))
13822 switch (code)
13824 case EQ_EXPR:
13825 case ORDERED_EXPR:
13826 result = 0;
13827 break;
13829 case NE_EXPR:
13830 case UNORDERED_EXPR:
13831 case UNLT_EXPR:
13832 case UNLE_EXPR:
13833 case UNGT_EXPR:
13834 case UNGE_EXPR:
13835 case UNEQ_EXPR:
13836 result = 1;
13837 break;
13839 case LT_EXPR:
13840 case LE_EXPR:
13841 case GT_EXPR:
13842 case GE_EXPR:
13843 case LTGT_EXPR:
13844 if (flag_trapping_math)
13845 return NULL_TREE;
13846 result = 0;
13847 break;
13849 default:
13850 gcc_unreachable ();
13853 return constant_boolean_node (result, type);
13856 return constant_boolean_node (real_compare (code, c0, c1), type);
13859 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13861 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13862 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13863 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13866 /* Handle equality/inequality of complex constants. */
13867 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13869 tree rcond = fold_relational_const (code, type,
13870 TREE_REALPART (op0),
13871 TREE_REALPART (op1));
13872 tree icond = fold_relational_const (code, type,
13873 TREE_IMAGPART (op0),
13874 TREE_IMAGPART (op1));
13875 if (code == EQ_EXPR)
13876 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13877 else if (code == NE_EXPR)
13878 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13879 else
13880 return NULL_TREE;
13883 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13885 unsigned count = VECTOR_CST_NELTS (op0);
13886 tree *elts = XALLOCAVEC (tree, count);
13887 gcc_assert (VECTOR_CST_NELTS (op1) == count
13888 && TYPE_VECTOR_SUBPARTS (type) == count);
13890 for (unsigned i = 0; i < count; i++)
13892 tree elem_type = TREE_TYPE (type);
13893 tree elem0 = VECTOR_CST_ELT (op0, i);
13894 tree elem1 = VECTOR_CST_ELT (op1, i);
13896 tree tem = fold_relational_const (code, elem_type,
13897 elem0, elem1);
13899 if (tem == NULL_TREE)
13900 return NULL_TREE;
13902 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13905 return build_vector (type, elts);
13908 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13910 To compute GT, swap the arguments and do LT.
13911 To compute GE, do LT and invert the result.
13912 To compute LE, swap the arguments, do LT and invert the result.
13913 To compute NE, do EQ and invert the result.
13915 Therefore, the code below must handle only EQ and LT. */
13917 if (code == LE_EXPR || code == GT_EXPR)
13919 std::swap (op0, op1);
13920 code = swap_tree_comparison (code);
13923 /* Note that it is safe to invert for real values here because we
13924 have already handled the one case where it matters. */
13926 invert = 0;
13927 if (code == NE_EXPR || code == GE_EXPR)
13929 invert = 1;
13930 code = invert_tree_comparison (code, false);
13933 /* Compute a result for LT or EQ if the arguments permit;
13934 otherwise return NULL_TREE. */
13935 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13937 if (code == EQ_EXPR)
13938 result = tree_int_cst_equal (op0, op1);
13939 else
13940 result = tree_int_cst_lt (op0, op1);
13942 else
13943 return NULL_TREE;
13945 if (invert)
13946 result ^= 1;
13947 return constant_boolean_node (result, type);
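/* Added example (a sketch, not in the original sources): with constant
   NaN operands the logic above gives, e.g.

     1.0 <  NaN  ->  0  (or NULL_TREE under flag_trapping_math, since
                         the run-time comparison may raise INVALID)
     1.0 == NaN  ->  0
     1.0 != NaN  ->  1

   each returned as a constant_boolean_node of TYPE.  */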
13950 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13951 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13952 itself. */
13954 tree
13955 fold_build_cleanup_point_expr (tree type, tree expr)
13957 /* If the expression does not have side effects then we don't have to wrap
13958 it with a cleanup point expression. */
13959 if (!TREE_SIDE_EFFECTS (expr))
13960 return expr;
13962 /* If the expression is a RETURN_EXPR, check whether the expression inside
13963 it, or the right-hand side of the MODIFY_EXPR inside it, has side
13964 effects. If either has none, we don't need to wrap the expression in
13965 a cleanup point expression. Note we don't check the left-hand side of
13966 the MODIFY_EXPR because it should always be the return decl. */
13967 if (TREE_CODE (expr) == RETURN_EXPR)
13969 tree op = TREE_OPERAND (expr, 0);
13970 if (!op || !TREE_SIDE_EFFECTS (op))
13971 return expr;
13972 op = TREE_OPERAND (op, 1);
13973 if (!TREE_SIDE_EFFECTS (op))
13974 return expr;
13977 return build1 (CLEANUP_POINT_EXPR, type, expr);
13980 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13981 of an indirection through OP0, or NULL_TREE if no simplification is
13982 possible. */
13984 tree
13985 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
13987 tree sub = op0;
13988 tree subtype;
13990 STRIP_NOPS (sub);
13991 subtype = TREE_TYPE (sub);
13992 if (!POINTER_TYPE_P (subtype))
13993 return NULL_TREE;
13995 if (TREE_CODE (sub) == ADDR_EXPR)
13997 tree op = TREE_OPERAND (sub, 0);
13998 tree optype = TREE_TYPE (op);
13999 /* *&CONST_DECL -> to the value of the const decl. */
14000 if (TREE_CODE (op) == CONST_DECL)
14001 return DECL_INITIAL (op);
14002 /* *&p => p; make sure to handle *&"str"[cst] here. */
14003 if (type == optype)
14005 tree fop = fold_read_from_constant_string (op);
14006 if (fop)
14007 return fop;
14008 else
14009 return op;
14011 /* *(foo *)&fooarray => fooarray[0] */
14012 else if (TREE_CODE (optype) == ARRAY_TYPE
14013 && type == TREE_TYPE (optype)
14014 && (!in_gimple_form
14015 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14017 tree type_domain = TYPE_DOMAIN (optype);
14018 tree min_val = size_zero_node;
14019 if (type_domain && TYPE_MIN_VALUE (type_domain))
14020 min_val = TYPE_MIN_VALUE (type_domain);
14021 if (in_gimple_form
14022 && TREE_CODE (min_val) != INTEGER_CST)
14023 return NULL_TREE;
14024 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14025 NULL_TREE, NULL_TREE);
14027 /* *(foo *)&complexfoo => __real__ complexfoo */
14028 else if (TREE_CODE (optype) == COMPLEX_TYPE
14029 && type == TREE_TYPE (optype))
14030 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14031 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14032 else if (TREE_CODE (optype) == VECTOR_TYPE
14033 && type == TREE_TYPE (optype))
14035 tree part_width = TYPE_SIZE (type);
14036 tree index = bitsize_int (0);
14037 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14041 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14042 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14044 tree op00 = TREE_OPERAND (sub, 0);
14045 tree op01 = TREE_OPERAND (sub, 1);
14047 STRIP_NOPS (op00);
14048 if (TREE_CODE (op00) == ADDR_EXPR)
14050 tree op00type;
14051 op00 = TREE_OPERAND (op00, 0);
14052 op00type = TREE_TYPE (op00);
14054 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14055 if (TREE_CODE (op00type) == VECTOR_TYPE
14056 && type == TREE_TYPE (op00type))
14058 HOST_WIDE_INT offset = tree_to_shwi (op01);
14059 tree part_width = TYPE_SIZE (type);
14060 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
14061 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14062 tree index = bitsize_int (indexi);
14064 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
14065 return fold_build3_loc (loc,
14066 BIT_FIELD_REF, type, op00,
14067 part_width, index);
14070 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14071 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14072 && type == TREE_TYPE (op00type))
14074 tree size = TYPE_SIZE_UNIT (type);
14075 if (tree_int_cst_equal (size, op01))
14076 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14078 /* ((foo *)&fooarray)[1] => fooarray[1] */
14079 else if (TREE_CODE (op00type) == ARRAY_TYPE
14080 && type == TREE_TYPE (op00type))
14082 tree type_domain = TYPE_DOMAIN (op00type);
14083 tree min_val = size_zero_node;
14084 if (type_domain && TYPE_MIN_VALUE (type_domain))
14085 min_val = TYPE_MIN_VALUE (type_domain);
14086 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14087 TYPE_SIZE_UNIT (type));
14088 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14089 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14090 NULL_TREE, NULL_TREE);
14095 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14096 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14097 && type == TREE_TYPE (TREE_TYPE (subtype))
14098 && (!in_gimple_form
14099 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14101 tree type_domain;
14102 tree min_val = size_zero_node;
14103 sub = build_fold_indirect_ref_loc (loc, sub);
14104 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14105 if (type_domain && TYPE_MIN_VALUE (type_domain))
14106 min_val = TYPE_MIN_VALUE (type_domain);
14107 if (in_gimple_form
14108 && TREE_CODE (min_val) != INTEGER_CST)
14109 return NULL_TREE;
14110 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14111 NULL_TREE);
14114 return NULL_TREE;
14117 /* Builds an expression for an indirection through T, simplifying some
14118 cases. */
14120 tree
14121 build_fold_indirect_ref_loc (location_t loc, tree t)
14123 tree type = TREE_TYPE (TREE_TYPE (t));
14124 tree sub = fold_indirect_ref_1 (loc, type, t);
14126 if (sub)
14127 return sub;
14129 return build1_loc (loc, INDIRECT_REF, type, t);
14132 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14134 tree
14135 fold_indirect_ref_loc (location_t loc, tree t)
14137 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14139 if (sub)
14140 return sub;
14141 else
14142 return t;
14145 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14146 whose result is ignored. The type of the returned tree need not be
14147 the same as the original expression. */
14149 tree
14150 fold_ignored_result (tree t)
14152 if (!TREE_SIDE_EFFECTS (t))
14153 return integer_zero_node;
14155 for (;;)
14156 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14158 case tcc_unary:
14159 t = TREE_OPERAND (t, 0);
14160 break;
14162 case tcc_binary:
14163 case tcc_comparison:
14164 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14165 t = TREE_OPERAND (t, 0);
14166 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14167 t = TREE_OPERAND (t, 1);
14168 else
14169 return t;
14170 break;
14172 case tcc_expression:
14173 switch (TREE_CODE (t))
14175 case COMPOUND_EXPR:
14176 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14177 return t;
14178 t = TREE_OPERAND (t, 0);
14179 break;
14181 case COND_EXPR:
14182 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14183 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14184 return t;
14185 t = TREE_OPERAND (t, 0);
14186 break;
14188 default:
14189 return t;
14191 break;
14193 default:
14194 return t;
14198 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14200 tree
14201 round_up_loc (location_t loc, tree value, unsigned int divisor)
14203 tree div = NULL_TREE;
14205 if (divisor == 1)
14206 return value;
14208 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14209 have to do anything. Only do this when we are not given a const,
14210 because for a constant this check is more expensive than simply
14211 doing the rounding. */
14212 if (TREE_CODE (value) != INTEGER_CST)
14214 div = build_int_cst (TREE_TYPE (value), divisor);
14216 if (multiple_of_p (TREE_TYPE (value), value, div))
14217 return value;
14220 /* If divisor is a power of two, simplify this to bit manipulation. */
14221 if (divisor == (divisor & -divisor))
14223 if (TREE_CODE (value) == INTEGER_CST)
14225 wide_int val = value;
14226 bool overflow_p;
14228 if ((val & (divisor - 1)) == 0)
14229 return value;
14231 overflow_p = TREE_OVERFLOW (value);
14232 val += divisor - 1;
14233 val &= - (int) divisor;
14234 if (val == 0)
14235 overflow_p = true;
14237 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14239 else
14241 tree t;
14243 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14244 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14245 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14246 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14249 else
14251 if (!div)
14252 div = build_int_cst (TREE_TYPE (value), divisor);
14253 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14254 value = size_binop_loc (loc, MULT_EXPR, value, div);
14257 return value;
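/* Added example (a sketch, not in the original sources): rounding 13 up
   to a multiple of 8 takes the power-of-two path,

     (13 + 7) & -8  ==  20 & ~7  ==  16

   while a non-power-of-two divisor such as 12 takes the
   CEIL_DIV_EXPR/MULT_EXPR path: CEIL_DIV_EXPR (13, 12) == 2, then
   2 * 12 == 24.  */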
14260 /* Likewise, but round down. */
14262 tree
14263 round_down_loc (location_t loc, tree value, int divisor)
14265 tree div = NULL_TREE;
14267 gcc_assert (divisor > 0);
14268 if (divisor == 1)
14269 return value;
14271 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14272 have to do anything. Only do this when we are not given a const,
14273 because for a constant this check is more expensive than simply
14274 doing the rounding. */
14275 if (TREE_CODE (value) != INTEGER_CST)
14277 div = build_int_cst (TREE_TYPE (value), divisor);
14279 if (multiple_of_p (TREE_TYPE (value), value, div))
14280 return value;
14283 /* If divisor is a power of two, simplify this to bit manipulation. */
14284 if (divisor == (divisor & -divisor))
14286 tree t;
14288 t = build_int_cst (TREE_TYPE (value), -divisor);
14289 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14291 else
14293 if (!div)
14294 div = build_int_cst (TREE_TYPE (value), divisor);
14295 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14296 value = size_binop_loc (loc, MULT_EXPR, value, div);
14299 return value;
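/* Added example (a sketch, not in the original sources): rounding 13
   down to a multiple of 8 is simply 13 & -8 == 8; for divisor 12 it is
   FLOOR_DIV_EXPR (13, 12) == 1 followed by 1 * 12 == 12.  */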
14302 /* Returns the pointer to the base of the object addressed by EXP and
14303 extracts the information about the offset of the access, storing it
14304 to PBITPOS and POFFSET. */
14306 static tree
14307 split_address_to_core_and_offset (tree exp,
14308 HOST_WIDE_INT *pbitpos, tree *poffset)
14310 tree core;
14311 machine_mode mode;
14312 int unsignedp, reversep, volatilep;
14313 HOST_WIDE_INT bitsize;
14314 location_t loc = EXPR_LOCATION (exp);
14316 if (TREE_CODE (exp) == ADDR_EXPR)
14318 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14319 poffset, &mode, &unsignedp, &reversep,
14320 &volatilep, false);
14321 core = build_fold_addr_expr_loc (loc, core);
14323 else
14325 core = exp;
14326 *pbitpos = 0;
14327 *poffset = NULL_TREE;
14330 return core;
14333 /* Returns true if addresses of E1 and E2 differ by a constant, false
14334 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14336 bool
14337 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14339 tree core1, core2;
14340 HOST_WIDE_INT bitpos1, bitpos2;
14341 tree toffset1, toffset2, tdiff, type;
14343 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14344 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14346 if (bitpos1 % BITS_PER_UNIT != 0
14347 || bitpos2 % BITS_PER_UNIT != 0
14348 || !operand_equal_p (core1, core2, 0))
14349 return false;
14351 if (toffset1 && toffset2)
14353 type = TREE_TYPE (toffset1);
14354 if (type != TREE_TYPE (toffset2))
14355 toffset2 = fold_convert (type, toffset2);
14357 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14358 if (!cst_and_fits_in_hwi (tdiff))
14359 return false;
14361 *diff = int_cst_value (tdiff);
14363 else if (toffset1 || toffset2)
14365 /* If only one of the offsets is non-constant, the difference cannot
14366 be a constant. */
14367 return false;
14369 else
14370 *diff = 0;
14372 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14373 return true;
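/* Added example (a sketch, not in the original sources): for

     int a[10];
     e1 = &a[3];  e2 = &a[1];

   both addresses decompose to the core &a with constant bit positions
   96 and 32 (4-byte int), so *DIFF is set to (96 - 32) / 8 == 8 and the
   function returns true.  Had either offset involved a variable index,
   the function would return false.  */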
14376 /* Return OFF converted to a pointer offset type suitable as offset for
14377 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14378 tree
14379 convert_to_ptrofftype_loc (location_t loc, tree off)
14381 return fold_convert_loc (loc, sizetype, off);
14384 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14385 tree
14386 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14388 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14389 ptr, convert_to_ptrofftype_loc (loc, off));
14392 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14393 tree
14394 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14396 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14397 ptr, size_int (off));
14400 /* Return a char pointer for a C string if it is a string constant
14401 or sum of string constant and integer constant. */
14403 const char *
14404 c_getstr (tree src)
14406 tree offset_node;
14408 src = string_constant (src, &offset_node);
14409 if (src == 0)
14410 return 0;
14412 if (offset_node == 0)
14413 return TREE_STRING_POINTER (src);
14414 else if (!tree_fits_uhwi_p (offset_node)
14415 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
14416 return 0;
14418 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
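/* Added example (a sketch, not in the original sources): for a tree
   representing "hello" + 2, string_constant returns the STRING_CST with
   offset 2, so c_getstr yields the suffix "llo"; an offset beyond the
   terminating NUL makes it return NULL instead.  */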