/* Utility routines for data type conversion for GCC.
   Copyright (C) 1987-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* These routines are somewhat language-independent utility functions
   intended to be called by the language-specific convert () functions.  */
#include "coretypes.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "langhooks.h"
#include "stringpool.h"
#define maybe_fold_build1_loc(FOLD_P, LOC, CODE, TYPE, EXPR) \
  ((FOLD_P) ? fold_build1_loc (LOC, CODE, TYPE, EXPR) \
   : build1_loc (LOC, CODE, TYPE, EXPR))
#define maybe_fold_build2_loc(FOLD_P, LOC, CODE, TYPE, EXPR1, EXPR2) \
  ((FOLD_P) ? fold_build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2) \
   : build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2))
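
/* For instance, a call in the routines below such as

     maybe_fold_build1_loc (dofold, loc, NOP_EXPR, type, expr)

   expands to fold_build1_loc (loc, NOP_EXPR, type, expr) when DOFOLD is
   true, and to the non-folding build1_loc (loc, NOP_EXPR, type, expr)
   otherwise, so each conversion routine can defer the fold/no-fold
   decision to its caller.  */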
/* Convert EXPR to some pointer or reference type TYPE.
   EXPR must be pointer, reference, integer, enumeral, or literal zero;
   in other cases error is called.  If FOLD_P is true, try to fold the
   expression.  */

convert_to_pointer_1 (tree type, tree expr, bool fold_p)

  location_t loc = EXPR_LOCATION (expr);

  if (TREE_TYPE (expr) == type)
    return expr;

  switch (TREE_CODE (TREE_TYPE (expr)))
      /* If the pointers point to different address spaces, conversion needs
	 to be done via an ADDR_SPACE_CONVERT_EXPR instead of a NOP_EXPR.  */
      addr_space_t to_as = TYPE_ADDR_SPACE (TREE_TYPE (type));
      addr_space_t from_as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));

      if (to_as == from_as)
	return maybe_fold_build1_loc (fold_p, loc, NOP_EXPR, type, expr);
      else
	return maybe_fold_build1_loc (fold_p, loc, ADDR_SPACE_CONVERT_EXPR,
				      type, expr);
      /* If the input precision differs from the target pointer type
	 precision, first convert the input expression to an integer type of
	 the target precision.  Some targets, e.g. VMS, need several pointer
	 sizes to coexist so the latter isn't necessarily POINTER_SIZE.  */
      unsigned int pprec = TYPE_PRECISION (type);
      unsigned int eprec = TYPE_PRECISION (TREE_TYPE (expr));

      if (eprec != pprec)
	expr = maybe_fold_build1_loc (fold_p, loc, NOP_EXPR,
				      lang_hooks.types.type_for_size (pprec, 0),
				      expr);

      return maybe_fold_build1_loc (fold_p, loc, CONVERT_EXPR, type, expr);
98 error ("cannot convert to a pointer type");
99 return error_mark_node
;
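
/* Illustration of the precision adjustment above (hypothetical code,
   assuming an ILP32 target with 16-bit short and 32-bit pointers):
   converting a short S to a pointer type, as in

     char *p = (char *) s;

   is emitted as (char *) (int) s, i.e. S is first widened to an integer
   of pointer precision and only then converted to the pointer type,
   rather than being converted directly.  */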
/* Subroutine of the various convert_to_*_maybe_fold routines.

   If a location wrapper has been folded to a constant (presumably of
   a different type), re-wrap the new constant with a location wrapper.  */

static tree
preserve_any_location_wrapper (tree result, tree orig_expr)
{
  if (CONSTANT_CLASS_P (result) && location_wrapper_p (orig_expr))
    {
      if (result == TREE_OPERAND (orig_expr, 0))
	return orig_expr;
      else
	return maybe_wrap_with_location (result, EXPR_LOCATION (orig_expr));
    }

  return result;
}
/* A wrapper around convert_to_pointer_1 that always folds the
   expression.  */

tree
convert_to_pointer (tree type, tree expr)
{
  return convert_to_pointer_1 (type, expr, true);
}
/* A wrapper around convert_to_pointer_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */

tree
convert_to_pointer_maybe_fold (tree type, tree expr, bool dofold)
{
  tree result
    = convert_to_pointer_1 (type, expr,
			    dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
  return preserve_any_location_wrapper (result, expr);
}
/* Convert EXPR to some floating-point type TYPE.

   EXPR must be float, fixed-point, integer, or enumeral;
   in other cases error is called.  If FOLD_P is true, try to fold
   the expression.  */

convert_to_real_1 (tree type, tree expr, bool fold_p)

  enum built_in_function fcode = builtin_mathfn_code (expr);
  tree itype = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  if (TREE_CODE (expr) == COMPOUND_EXPR)
    {
      tree t = convert_to_real_1 (type, TREE_OPERAND (expr, 1), fold_p);
      if (t == TREE_OPERAND (expr, 1))
	return expr;
      return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
			 TREE_OPERAND (expr, 0), t);
    }
  /* Disable until we figure out how to decide whether the functions are
     present in the runtime.  */
  /* Convert (float)sqrt((double)x) where x is float into sqrtf(x).  */
  if (optimize
      && (TYPE_MODE (type) == TYPE_MODE (double_type_node)
	  || TYPE_MODE (type) == TYPE_MODE (float_type_node)))
#define CASE_MATHFN(FN) case BUILT_IN_##FN: case BUILT_IN_##FN##L:
	  /* The above functions may set errno differently with float
	     input or output so this transformation is not safe with
	     -fmath-errno.  */

	  /* It is not safe to do this conversion for the above functions.  */
	  if (!flag_unsafe_math_optimizations)
	    break;
	  if (call_expr_nargs (expr) != 1
	      || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (expr, 0))))
	    break;

	  tree arg0 = strip_float_extensions (CALL_EXPR_ARG (expr, 0));
	  tree newtype = type;

	  /* We have (outertype)sqrt((innertype)x).  Choose the wider mode
	     from both as the safe type for the operation.  */
	  if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (type))
	    newtype = TREE_TYPE (arg0);
	  /* We consider converting

	       (T1) sqrtT2 ((T2) exprT3)
	     to
	       (T1) sqrtT4 ((T4) exprT3),

	     where T1 is TYPE, T2 is ITYPE, T3 is TREE_TYPE (ARG0),
	     and T4 is NEWTYPE.  All those types are floating-point types.
	     T4 (NEWTYPE) should be narrower than T2 (ITYPE).  This conversion
	     is safe only if P1 >= P2*2+2, where P1 and P2 are the precisions
	     of T2 and T4.  See the following URL for a reference:
	     http://stackoverflow.com/questions/9235456/determining-
	     floating-point-square-root  */
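	  /* Concretely, with IEEE binary32 (P2 = 24) and binary64 (P1 = 53)
	     the condition holds (53 >= 2*24 + 2 = 50), so

	       float f (float x) { return (float) sqrt ((double) x); }

	     yields the same correctly-rounded result as

	       float g (float x) { return sqrtf (x); }

	     By contrast, narrowing sqrtl from an 80-bit extended format
	     (P1 = 64) down to binary64 (P2 = 53) fails the test
	     (64 < 2*53 + 2 = 108) and is only done with
	     -funsafe-math-optimizations.  (Worked example, assuming the
	     usual IEEE formats.)  */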
	  if ((fcode == BUILT_IN_SQRT || fcode == BUILT_IN_SQRTL)
	      && !flag_unsafe_math_optimizations)
	    {
	      /* The following conversion is unsafe even if the precision
		 condition below is satisfied:

		 (float) sqrtl ((long double) double_val)
		   -> (float) sqrt (double_val)  */
	      if (TYPE_MODE (type) != TYPE_MODE (newtype))
		break;

	      int p1 = REAL_MODE_FORMAT (TYPE_MODE (itype))->p;
	      int p2 = REAL_MODE_FORMAT (TYPE_MODE (newtype))->p;
	      if (p1 < p2 * 2 + 2)
		break;
	    }
	  /* Be careful about integer to fp conversions.
	     These may still overflow.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
	      && (TYPE_MODE (newtype) == TYPE_MODE (double_type_node)
		  || TYPE_MODE (newtype) == TYPE_MODE (float_type_node)))

	      tree fn = mathfn_built_in (newtype, fcode);
	      tree arg = convert_to_real_1 (newtype, arg0, fold_p);
	      expr = build_call_expr (fn, 1, arg);
  /* Propagate the cast into the operation.  */
  if (itype != type && FLOAT_TYPE_P (type))
    switch (TREE_CODE (expr))

      /* Convert (float)-x into -(float)x.  This is safe for
	 round-to-nearest rounding mode when the inner type is float.  */
      if (!flag_rounding_math
	  && FLOAT_TYPE_P (itype)
	  && element_precision (type) < element_precision (itype))
	{
	  tree arg = convert_to_real_1 (type, TREE_OPERAND (expr, 0),
					fold_p);
	  return build1 (TREE_CODE (expr), type, arg);
	}
  switch (TREE_CODE (TREE_TYPE (expr)))

      /* Ignore the conversion if we don't need to store intermediate
	 results and neither type is a decimal float.  */
      return build1_loc (loc,
			 (flag_float_store
			  || DECIMAL_FLOAT_TYPE_P (type)
			  || DECIMAL_FLOAT_TYPE_P (itype))
			 ? CONVERT_EXPR : NOP_EXPR, type, expr);

      return build1 (FLOAT_EXPR, type, expr);

    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

      return convert (type,
		      maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
					     TREE_TYPE (TREE_TYPE (expr)),
					     expr));
334 error ("pointer value used where a floating-point was expected");
335 return error_mark_node
;
338 error ("vector value used where a floating-point was expected");
339 return error_mark_node
;
342 error ("aggregate value used where a floating-point was expected");
343 return error_mark_node
;
/* A wrapper around convert_to_real_1 that always folds the
   expression.  */

tree
convert_to_real (tree type, tree expr)
{
  return convert_to_real_1 (type, expr, true);
}
/* A wrapper around convert_to_real_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */

tree
convert_to_real_maybe_fold (tree type, tree expr, bool dofold)
{
  tree result
    = convert_to_real_1 (type, expr,
			 dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
  return preserve_any_location_wrapper (result, expr);
}
/* Try to narrow EX_FORM ARG0 ARG1 in narrowed arg types producing a
   result in TYPE.  */

do_narrow (location_t loc,
	   enum tree_code ex_form, tree type, tree arg0, tree arg1,
	   tree expr, unsigned inprec, unsigned outprec, bool dofold)
  /* Do the arithmetic in type TYPEX,
     then convert result to TYPE.  */
  tree typex = type;

  /* Can't do arithmetic in enumeral types
     so use an integer type that will hold the values.  */
  if (TREE_CODE (typex) == ENUMERAL_TYPE)
    typex = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
					    TYPE_UNSIGNED (typex));

  /* The type demotion below might cause the arithmetic to be done
     unsigned instead of signed, and thus hide overflow bugs.  */
  if ((ex_form == PLUS_EXPR || ex_form == MINUS_EXPR)
      && !TYPE_UNSIGNED (typex)
      && sanitize_flags_p (SANITIZE_SI_OVERFLOW))
    return NULL_TREE;
  /* Similarly for multiplication, but in that case it can be
     problematic even if typex is an unsigned type - 0xffff * 0xffff
     overflows in int.  */
  if (ex_form == MULT_EXPR
      && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (expr))
      && sanitize_flags_p (SANITIZE_SI_OVERFLOW))
    return NULL_TREE;

  /* But now perhaps TYPEX is as wide as INPREC.
     In that case, do nothing special here.
     (Otherwise we would recurse infinitely in convert.)  */
  if (TYPE_PRECISION (typex) != inprec)

      /* Don't do unsigned arithmetic where signed was wanted,
	 or vice versa.
	 Exception: if both of the original operands were
	 unsigned then we can safely do the work as unsigned.
	 Exception: shift operations take their type solely
	 from the first argument.
	 Exception: the LSHIFT_EXPR case above requires that
	 we perform this operation unsigned lest we produce
	 signed-overflow undefinedness.
	 And we may need to do it as unsigned
	 if we truncate to the original size.  */
      if (TYPE_UNSIGNED (TREE_TYPE (expr))
	  || (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      && (TYPE_UNSIGNED (TREE_TYPE (arg1))
		  || ex_form == LSHIFT_EXPR
		  || ex_form == RSHIFT_EXPR
		  || ex_form == LROTATE_EXPR
		  || ex_form == RROTATE_EXPR))
	  || ex_form == LSHIFT_EXPR
	  /* If we have !flag_wrapv, and either ARG0 or
	     ARG1 is of a signed type, we have to do
	     PLUS_EXPR, MINUS_EXPR or MULT_EXPR in an unsigned
	     type in case the operation in outprec precision
	     could overflow.  Otherwise, we would introduce
	     signed-overflow undefinedness.  */
	  || ((!(INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
	       || !(INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		    && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1))))
	      && ((TYPE_PRECISION (TREE_TYPE (arg0)) * 2u
		   > outprec)
		  || (TYPE_PRECISION (TREE_TYPE (arg1)) * 2u
		      > outprec))
	      && (ex_form == PLUS_EXPR
		  || ex_form == MINUS_EXPR
		  || ex_form == MULT_EXPR)))
	{
	  if (!TYPE_UNSIGNED (typex))
	    typex = unsigned_type_for (typex);
	}
      else
	{
	  if (TYPE_UNSIGNED (typex))
	    typex = signed_type_for (typex);
	}

      /* We should do away with all this once we have a proper
	 type promotion/demotion pass, see PR45397.  */
      expr = maybe_fold_build2_loc (dofold, loc, ex_form, typex,
				    convert (typex, arg0),
				    convert (typex, arg1));
      return convert (type, expr);
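
/* A worked illustration of the narrowing above (hypothetical helper,
   assuming 32-bit int and 64-bit long): narrowing

     (int) ((long) a + (long) b)

   must not be done directly as an int addition, because that addition
   could overflow (undefined behaviour), while converting the wide sum
   to int is only implementation-defined (modulo reduction in GCC).  The
   narrowed form therefore uses the corresponding unsigned type:

     int narrow_add (int a, int b)
     {
       return (int) ((unsigned int) a + (unsigned int) b);
     }

   which yields the same low 32 bits as the 64-bit sum.  */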
/* Convert EXPR to some integer (or enum) type TYPE.

   EXPR must be pointer, integer, discrete (enum, char, or bool), float,
   fixed-point or vector; in other cases error is called.

   If DOFOLD is TRUE, we try to simplify newly-created patterns by folding.

   The result of this is always supposed to be a newly created tree node
   not in use in any existing structure.  */
convert_to_integer_1 (tree type, tree expr, bool dofold)

  enum tree_code ex_form = TREE_CODE (expr);
  tree intype = TREE_TYPE (expr);
  unsigned int inprec = element_precision (intype);
  unsigned int outprec = element_precision (type);
  location_t loc = EXPR_LOCATION (expr);

  /* An INTEGER_TYPE cannot be incomplete, but an ENUMERAL_TYPE can
     be.  Consider `enum E { a, b = (enum E) 3 };'.  */
  if (!COMPLETE_TYPE_P (type))
    {
      error ("conversion to incomplete type");
      return error_mark_node;
    }
  if (ex_form == COMPOUND_EXPR)
    {
      tree t = convert_to_integer_1 (type, TREE_OPERAND (expr, 1), dofold);
      if (t == TREE_OPERAND (expr, 1))
	return expr;
      return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
			 TREE_OPERAND (expr, 0), t);
    }
  /* Convert e.g. (long)round(d) -> lround(d).  */
  /* If we're converting to char, we may encounter differing behavior
     between converting from double->char vs double->long->char.
     We're in "undefined" territory but we prefer to be conservative,
     so only proceed in "unsafe" math mode.  */
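  /* For instance (illustration only, assuming an 8-bit signed char), with
     d = 1000.4:

       char c1 = (char) round (d);   // double -> char, undefined: the
				     // integral value 1000.0 is out of range
       char c2 = (char) lround (d);  // double -> long -> char, merely
				     // implementation-defined (-24 in GCC)

     so the transformation is only attempted when the result type is at
     least as wide as long, or with -funsafe-math-optimizations.  */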
504 && (flag_unsafe_math_optimizations
505 || (long_integer_type_node
506 && outprec
>= TYPE_PRECISION (long_integer_type_node
))))
508 tree s_expr
= strip_float_extensions (expr
);
509 tree s_intype
= TREE_TYPE (s_expr
);
510 const enum built_in_function fcode
= builtin_mathfn_code (s_expr
);
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
	  /* Only convert in ISO C99 mode.  */
	  if (!targetm.libc_has_function (function_c99_misc, intype))
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_ICEIL);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LCEIL);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLCEIL);
	  break;
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
	  /* Only convert in ISO C99 mode.  */
	  if (!targetm.libc_has_function (function_c99_misc, intype))
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_IFLOOR);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LFLOOR);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLFLOOR);
	  break;
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
	  /* Only convert in ISO C99 mode and with -fno-math-errno.  */
	  if (!targetm.libc_has_function (function_c99_misc, intype)
	      || flag_errno_math)
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_IROUND);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LROUND);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLROUND);
	  break;
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
	  /* Only convert nearbyint* if we can ignore math exceptions.  */
	  if (flag_trapping_math)
	    break;
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
	  /* Only convert in ISO C99 mode and with -fno-math-errno.  */
	  if (!targetm.libc_has_function (function_c99_misc, intype)
	      || flag_errno_math)
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_IRINT);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LRINT);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLRINT);
	  break;
	CASE_FLT_FN (BUILT_IN_TRUNC):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
	  if (call_expr_nargs (s_expr) != 1
	      || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (s_expr, 0)))
	      || (!flag_fp_int_builtin_inexact && flag_trapping_math))
	    break;
	  return convert_to_integer_1 (type, CALL_EXPR_ARG (s_expr, 0),
				       dofold);

      if (fn
	  && call_expr_nargs (s_expr) == 1
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (s_expr, 0))))
	{
	  tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
	  return convert_to_integer_1 (type, newexpr, dofold);
	}
  /* Convert (int)logb(d) -> ilogb(d).  */
  if (optimize
      && flag_unsafe_math_optimizations
      && !flag_trapping_math && !flag_errno_math && flag_finite_math_only
      && (outprec > TYPE_PRECISION (integer_type_node)
	  || (outprec == TYPE_PRECISION (integer_type_node)
	      && !TYPE_UNSIGNED (type))))
    {
      tree s_expr = strip_float_extensions (expr);
      tree s_intype = TREE_TYPE (s_expr);
      const enum built_in_function fcode = builtin_mathfn_code (s_expr);
      tree fn = 0;

      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LOGB):
	  fn = mathfn_built_in (s_intype, BUILT_IN_ILOGB);
	  break;
	}

      if (fn
	  && call_expr_nargs (s_expr) == 1
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (s_expr, 0))))
	{
	  tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
	  return convert_to_integer_1 (type, newexpr, dofold);
	}
    }
  switch (TREE_CODE (intype))

      if (integer_zerop (expr)
	  && !TREE_OVERFLOW (tree_strip_any_location_wrapper (expr)))
	return build_int_cst (type, 0);

      /* Convert to an unsigned integer of the correct width first, and from
	 there widen/truncate to the required type.  Some targets support the
	 coexistence of multiple valid pointer sizes, so fetch the one we need
	 from the pointer's type.  */
      if (!dofold)
	return build1 (CONVERT_EXPR, type, expr);
      expr = fold_build1 (CONVERT_EXPR,
			  lang_hooks.types.type_for_size
			    (TYPE_PRECISION (intype), 0),
			  expr);
      return fold_convert (type, expr);
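
      /* Illustration, assuming an LP64 target: converting a pointer P to
	 short is not done in one step; it is rewritten as

	   (short) (long) p

	 i.e. a conversion to an integer of pointer precision followed by
	 the ordinary integer truncation handled further below.  */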
      /* If this is a logical operation, which just returns 0 or 1, we can
	 change the type of the expression.  */

      if (TREE_CODE_CLASS (ex_form) == tcc_comparison)
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}

      /* If we are widening the type, put in an explicit conversion.
	 Similarly if we are not changing the width.  After this, we know
	 we are truncating EXPR.  */

      else if (outprec >= inprec)
	{
	  enum tree_code code;

	  /* If the precision of the EXPR's type is K bits and the
	     destination mode has more bits, and the sign is changing,
	     it is not safe to use a NOP_EXPR.  For example, suppose
	     that EXPR's type is a 3-bit unsigned integer type, the
	     TYPE is a 3-bit signed integer type, and the machine mode
	     for the types is 8-bit QImode.  In that case, the
	     conversion necessitates an explicit sign-extension.  In
	     the signed-to-unsigned case the high-order bits have to
	     be cleared.  */
	  if (TYPE_UNSIGNED (type) != TYPE_UNSIGNED (TREE_TYPE (expr))
	      && !type_has_mode_precision_p (TREE_TYPE (expr)))
	    code = CONVERT_EXPR;
	  else
	    code = NOP_EXPR;

	  return maybe_fold_build1_loc (dofold, loc, code, type, expr);
	}
      /* If TYPE is an enumeral type or a type with a precision less
	 than the number of bits in its mode, do the conversion to the
	 type corresponding to its mode, then do a nop conversion
	 to TYPE.  */
      else if (TREE_CODE (type) == ENUMERAL_TYPE
	       || (TREE_CODE (type) != BITINT_TYPE
		   && maybe_ne (outprec,
				GET_MODE_PRECISION (TYPE_MODE (type)))))
	{
	  expr
	    = convert_to_integer_1 (lang_hooks.types.type_for_mode
				    (TYPE_MODE (type), TYPE_UNSIGNED (type)),
				    expr, dofold);
	  return maybe_fold_build1_loc (dofold, loc, NOP_EXPR, type, expr);
	}
      /* Here detect when we can distribute the truncation down past some
	 arithmetic.  For example, if adding two longs and converting to an
	 int, we can equally well convert both to ints and then add.
	 For the operations handled here, such truncation distribution
	 is always safe.

	 It is desirable in these cases:
	 1) when truncating down to full-word from a larger size,
	 2) when truncating takes no work,
	 3) when at least one operand of the arithmetic has been extended
	 (as by C's default conversions).  In this case we need two conversions
	 if we do the arithmetic as already requested, so we might as well
	 truncate both and then combine.  Perhaps that way we need only one.

	 Note that in general we cannot do the arithmetic in a type
	 shorter than the desired result of conversion, even if the operands
	 are both extended from a shorter type, because they might overflow
	 if combined in that type.  The exceptions to this--the times when
	 two narrow values can be combined in their narrow type even to
	 make a wider result--are handled by "shorten" in build_binary_op.  */
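      /* For example (illustration only, assuming 16-bit short and 32-bit
	 int), with

	   short a, b;

	 the truncation in (short) (a + b) distributes: the operands were
	 extended from short by the default promotions, so the addition may
	 as well be done in 16 bits (case 3 above).  By contrast,
	 (int) (a + b) must not be narrowed to a 16-bit addition, because
	 the int sum can exceed the range of short.  */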
	  /* We can pass truncation down through right shifting
	     when the shift count is a nonpositive constant.  */
	  if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
	      && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) <= 0)
	    goto trunc1;
	  break;

	  /* We can pass truncation down through left shifting
	     when the shift count is a nonnegative constant and
	     the target type is unsigned.  */
	  if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
	      && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) >= 0
	      && TYPE_UNSIGNED (type)
	      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
	    {
	      /* If shift count is less than the width of the truncated type,
		 really shift.  */
	      if (tree_int_cst_lt (TREE_OPERAND (expr, 1), TYPE_SIZE (type)))
		/* In this case, shifting is like multiplication.  */
		goto trunc1;
	      else
		{
		  /* If it is >= that width, result is zero.
		     Handling this with trunc1 would give the wrong result:
		     (int) ((long long) a << 32) is well defined (as 0)
		     but (int) a << 32 is undefined and would get a
		     warning.  */

		  tree t = build_int_cst (type, 0);

		  /* If the original expression had side-effects, we must
		     preserve it.  */
		  if (TREE_SIDE_EFFECTS (expr))
		    return build2 (COMPOUND_EXPR, type, expr, t);
		  else
		    return t;
		}
	    }
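
	  /* Worked example of the comment above (hypothetical code, assuming
	     32-bit int and 64-bit long long):

	       int f (int a) { return (int) ((long long) a << 32); }

	     is well defined and always yields 0, since the low 32 bits of
	     the 64-bit shift are zero, whereas rewriting it as a 32-bit
	     shift,

	       int g (int a) { return a << 32; }

	     would be undefined (shift count equal to the width).  Hence the
	     truncation is folded to the constant 0 rather than being pushed
	     into the shift.  */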
	    tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), NULL_TREE);
	    tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), NULL_TREE);

	    /* Don't distribute unless the output precision is at least as
	       big as the actual inputs and it has the same signedness.  */
	    if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
		&& outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
		/* If the signedness of arg0 and arg1 doesn't match,
		   we can't necessarily find a type to compare them in.  */
		&& (TYPE_UNSIGNED (TREE_TYPE (arg0))
		    == TYPE_UNSIGNED (TREE_TYPE (arg1)))
		/* Do not change the sign of the division.  */
		&& (TYPE_UNSIGNED (TREE_TYPE (expr))
		    == TYPE_UNSIGNED (TREE_TYPE (arg0)))
		/* Either require unsigned division or a division by
		   a constant that is not -1.  */
		&& (TYPE_UNSIGNED (TREE_TYPE (arg0))
		    || (TREE_CODE (arg1) == INTEGER_CST
			&& !integer_all_onesp (arg1))))
	      {
		tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
				      expr, inprec, outprec, dofold);
		if (tem)
		  return tem;
	      }
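
	    /* The -1 restriction matters because of the one case where the
	       narrower division can overflow even though the wide one does
	       not (illustration, assuming 16-bit short and 32-bit int):

		 (short) ((int) SHRT_MIN / -1)

	       is 32768 reduced to short (well defined in GCC), whereas the
	       same division performed in 16-bit precision overflows and can
	       trap at run time.  */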
	    tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
	    tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

	    /* Don't distribute unless the output precision is at least as
	       big as the actual inputs.  Otherwise, the comparison of the
	       truncated values will be wrong.  */
	    if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
		&& outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
		/* If the signedness of arg0 and arg1 doesn't match,
		   we can't necessarily find a type to compare them in.  */
		&& (TYPE_UNSIGNED (TREE_TYPE (arg0))
		    == TYPE_UNSIGNED (TREE_TYPE (arg1))))
	      goto trunc1;
	    tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
	    tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

	    /* Do not try to narrow operands of pointer subtraction;
	       that will interfere with other folding.  */
	    if (ex_form == MINUS_EXPR
		&& CONVERT_EXPR_P (arg0)
		&& CONVERT_EXPR_P (arg1)
		&& POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
		&& POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	      break;

	    tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
				  expr, inprec, outprec, dofold);
	    if (tem)
	      return tem;
	  /* Using unsigned arithmetic for signed types may hide overflow
	     bugs.  */
	  if (!TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (expr, 0)))
	      && sanitize_flags_p (SANITIZE_SI_OVERFLOW))
	    break;

	  /* This is not correct for ABS_EXPR,
	     since we must test the sign before truncation.  */
	  {
	    /* Do the arithmetic in type TYPEX,
	       then convert result to TYPE.  */
	    tree typex = type;

	    /* Can't do arithmetic in enumeral types
	       so use an integer type that will hold the values.  */
	    if (TREE_CODE (typex) == ENUMERAL_TYPE)
	      typex
		= lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
						  TYPE_UNSIGNED (typex));

	    if (!TYPE_UNSIGNED (typex))
	      typex = unsigned_type_for (typex);
	    return convert (type,
			    fold_build1 (ex_form, typex,
					 convert (typex,
						  TREE_OPERAND (expr, 0))));
	  }
	  {
	    tree argtype = TREE_TYPE (TREE_OPERAND (expr, 0));
	    /* Don't introduce a "can't convert between vector values
	       of different size" error.  */
	    if (TREE_CODE (argtype) == VECTOR_TYPE
		&& maybe_ne (GET_MODE_SIZE (TYPE_MODE (argtype)),
			     GET_MODE_SIZE (TYPE_MODE (type))))
	      break;
	  }
	  /* If truncating after truncating, might as well do all at once.
	     If truncating after extending, we may get rid of wasted work.  */
	  return convert (type, get_unwidened (TREE_OPERAND (expr, 0), type));
	  /* It is sometimes worthwhile to push the narrowing down through
	     the conditional, and it never loses.  A COND_EXPR may have a
	     throw as one operand, which then has void type.  Just leave
	     void operands as they are.  */
	  return
	    fold_build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
			 VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1)))
			 ? TREE_OPERAND (expr, 1)
			 : convert (type, TREE_OPERAND (expr, 1)),
			 VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 2)))
			 ? TREE_OPERAND (expr, 2)
			 : convert (type, TREE_OPERAND (expr, 2)));
      /* When parsing long initializers, we might end up with a lot of
	 casts.  */
      if (TREE_CODE (tree_strip_any_location_wrapper (expr)) == INTEGER_CST)
	return fold_convert (type, expr);
      return build1 (CONVERT_EXPR, type, expr);
      if (sanitize_flags_p (SANITIZE_FLOAT_CAST)
	  && current_function_decl != NULL_TREE)
	{
	  expr = save_expr (expr);
	  tree check = ubsan_instrument_float_cast (loc, type, expr);
	  expr = build1 (FIX_TRUNC_EXPR, type, expr);
	  if (check == NULL_TREE)
	    return expr;
	  return maybe_fold_build2_loc (dofold, loc, COMPOUND_EXPR,
					TREE_TYPE (expr), check, expr);
	}
      else
	return build1 (FIX_TRUNC_EXPR, type, expr);
    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

      expr = maybe_fold_build1_loc (dofold, loc, REALPART_EXPR,
				    TREE_TYPE (TREE_TYPE (expr)), expr);
      return convert (type, expr);

      if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
	{
	  error ("cannot convert a vector of type %qT"
		 " to type %qT which has different size",
		 TREE_TYPE (expr), type);
	  return error_mark_node;
	}
      return build1 (VIEW_CONVERT_EXPR, type, expr);

      error ("aggregate value used where an integer was expected");
      return error_mark_node;
/* Convert EXPR to some integer (or enum) type TYPE.

   EXPR must be pointer, integer, discrete (enum, char, or bool), float,
   fixed-point or vector; in other cases error is called.

   The result of this is always supposed to be a newly created tree node
   not in use in any existing structure.  */

tree
convert_to_integer (tree type, tree expr)
{
  return convert_to_integer_1 (type, expr, true);
}
/* A wrapper around convert_to_integer_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */

tree
convert_to_integer_maybe_fold (tree type, tree expr, bool dofold)
{
  tree result
    = convert_to_integer_1 (type, expr,
			    dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
  return preserve_any_location_wrapper (result, expr);
}
/* Convert EXPR to the complex type TYPE in the usual ways.  If FOLD_P is
   true, try to fold the expression.  */

convert_to_complex_1 (tree type, tree expr, bool fold_p)

  location_t loc = EXPR_LOCATION (expr);
  tree subtype = TREE_TYPE (type);
  switch (TREE_CODE (TREE_TYPE (expr)))

    case FIXED_POINT_TYPE:
      {
	tree real = convert (subtype, expr);
	tree imag = convert (subtype, integer_zero_node);
	if (error_operand_p (real) || error_operand_p (imag))
	  return error_mark_node;
	return build2 (COMPLEX_EXPR, type, real, imag);
      }
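
      /* For example, converting the scalar 3.0 to _Complex double builds
	 COMPLEX_EXPR <3.0, 0.0>, matching the C semantics of

	   double _Complex z = 3.0;   // imaginary part is 0.0

	 (illustration only).  */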
	tree elt_type = TREE_TYPE (TREE_TYPE (expr));

	if (TYPE_MAIN_VARIANT (elt_type) == TYPE_MAIN_VARIANT (subtype))
	  return expr;
	else if (TREE_CODE (expr) == COMPOUND_EXPR)
	  {
	    tree t = convert_to_complex_1 (type, TREE_OPERAND (expr, 1),
					   fold_p);
	    if (t == TREE_OPERAND (expr, 1))
	      return expr;
	    return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR,
			       TREE_TYPE (t), TREE_OPERAND (expr, 0), t);
	  }
	else if (TREE_CODE (expr) == COMPLEX_EXPR)
	  return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
					convert (subtype,
						 TREE_OPERAND (expr, 0)),
					convert (subtype,
						 TREE_OPERAND (expr, 1)));
	else
	  {
	    expr = save_expr (expr);
	    tree realp = maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
						TREE_TYPE (TREE_TYPE (expr)),
						expr);
	    tree imagp = maybe_fold_build1_loc (fold_p, loc, IMAGPART_EXPR,
						TREE_TYPE (TREE_TYPE (expr)),
						expr);
	    return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
					  convert (subtype, realp),
					  convert (subtype, imagp));
	  }
    case REFERENCE_TYPE:
      error ("pointer value used where a complex was expected");
      return error_mark_node;

      error ("aggregate value used where a complex was expected");
      return error_mark_node;
/* A wrapper around convert_to_complex_1 that always folds the
   expression.  */

tree
convert_to_complex (tree type, tree expr)
{
  return convert_to_complex_1 (type, expr, true);
}
/* A wrapper around convert_to_complex_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */

tree
convert_to_complex_maybe_fold (tree type, tree expr, bool dofold)
{
  tree result
    = convert_to_complex_1 (type, expr,
			    dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
  return preserve_any_location_wrapper (result, expr);
}
/* Convert EXPR to the vector type TYPE in the usual ways.  */

tree
convert_to_vector (tree type, tree expr)
{
  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case INTEGER_TYPE:
    case VECTOR_TYPE:
      if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
	{
	  error ("cannot convert a value of type %qT"
		 " to vector type %qT which has different size",
		 TREE_TYPE (expr), type);
	  return error_mark_node;
	}
      return build1 (VIEW_CONVERT_EXPR, type, expr);

    default:
      error ("cannot convert value to a vector");
      return error_mark_node;
    }
}
/* Convert EXPR to some fixed-point type TYPE.

   EXPR must be fixed-point, float, integer, or enumeral;
   in other cases error is called.  */

tree
convert_to_fixed (tree type, tree expr)
{
  if (integer_zerop (expr))
    {
      tree fixed_zero_node = build_fixed (type, FCONST0 (TYPE_MODE (type)));
      return fixed_zero_node;
    }
  else if (integer_onep (expr) && ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)))
    {
      tree fixed_one_node = build_fixed (type, FCONST1 (TYPE_MODE (type)));
      return fixed_one_node;
    }

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

      return convert (type,
		      fold_build1 (REALPART_EXPR,
				   TREE_TYPE (TREE_TYPE (expr)), expr));

      error ("aggregate value used where a fixed-point was expected");
      return error_mark_node;
    }
}
#if CHECKING_P

namespace selftest {

/* Selftests for conversions.  */
static void
test_convert_to_integer_maybe_fold (tree orig_type, tree new_type)
{
  /* Calling convert_to_integer_maybe_fold on an INTEGER_CST.  */

  tree orig_cst = build_int_cst (orig_type, 42);

  /* Verify that convert_to_integer_maybe_fold on a constant returns a new
     constant of the new type, unless the types are the same, in which
     case verify it's a no-op.  */
  {
    tree result = convert_to_integer_maybe_fold (new_type,
						 orig_cst, false);
    if (orig_type != new_type)
      {
	ASSERT_EQ (TREE_TYPE (result), new_type);
	ASSERT_EQ (TREE_CODE (result), INTEGER_CST);
      }
    else
      ASSERT_EQ (result, orig_cst);
  }

  /* Calling convert_to_integer_maybe_fold on a location wrapper around
     an INTEGER_CST.

     Verify that convert_to_integer_maybe_fold on a location wrapper
     around a constant returns a new location wrapper around an equivalent
     constant, both of the new type, unless the types are the same,
     in which case the original wrapper should be returned.  */
  {
    const location_t loc = BUILTINS_LOCATION;
    tree wrapped_orig_cst = maybe_wrap_with_location (orig_cst, loc);
    tree result
      = convert_to_integer_maybe_fold (new_type, wrapped_orig_cst, false);
    ASSERT_EQ (TREE_TYPE (result), new_type);
    ASSERT_EQ (EXPR_LOCATION (result), loc);
    ASSERT_TRUE (location_wrapper_p (result));
    ASSERT_EQ (TREE_TYPE (TREE_OPERAND (result, 0)), new_type);
    ASSERT_EQ (TREE_CODE (TREE_OPERAND (result, 0)), INTEGER_CST);

    if (orig_type == new_type)
      ASSERT_EQ (result, wrapped_orig_cst);
  }
}
/* Verify that convert_to_integer_maybe_fold preserves locations.  */

static void
test_convert_to_integer_maybe_fold ()
{
  test_convert_to_integer_maybe_fold (char_type_node, long_integer_type_node);

  test_convert_to_integer_maybe_fold (char_type_node, char_type_node);

  test_convert_to_integer_maybe_fold (char_type_node, long_integer_type_node);

  test_convert_to_integer_maybe_fold (long_integer_type_node,
				      long_integer_type_node);
}

/* Run all of the selftests within this file.  */

  test_convert_to_integer_maybe_fold ();

} // namespace selftest

#endif /* CHECKING_P */