/* Utility routines for data type conversion for GCC.
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* These routines are somewhat language-independent utility functions
   intended to be called by the language-specific convert () functions.  */
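/* Each front end defines its own convert () and typically just dispatches
   on the target type's TREE_CODE to the helpers in this file.  A rough
   sketch (real front ends also handle language-specific types and emit
   their own diagnostics):

     tree
     convert (tree type, tree expr)
     {
       if (TREE_CODE (type) == INTEGER_TYPE
           || TREE_CODE (type) == ENUMERAL_TYPE)
         return convert_to_integer (type, expr);
       if (TREE_CODE (type) == REAL_TYPE)
         return convert_to_real (type, expr);
       if (POINTER_TYPE_P (type))
         return convert_to_pointer (type, expr);
       if (TREE_CODE (type) == COMPLEX_TYPE)
         return convert_to_complex (type, expr);
       ...
     }  */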
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "convert.h"
#include "langhooks.h"
#include "builtins.h"
#include "ubsan.h"
#include "stringpool.h"
#include "asan.h"
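/* Build a unary or binary expression at LOC with code CODE: fold the
   result via fold_buildN_loc when FOLD_P is true, otherwise just build
   the tree node with buildN_loc.  */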
#define maybe_fold_build1_loc(FOLD_P, LOC, CODE, TYPE, EXPR) \
  ((FOLD_P) ? fold_build1_loc (LOC, CODE, TYPE, EXPR) \
   : build1_loc (LOC, CODE, TYPE, EXPR))
#define maybe_fold_build2_loc(FOLD_P, LOC, CODE, TYPE, EXPR1, EXPR2) \
  ((FOLD_P) ? fold_build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2) \
   : build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2))
/* Convert EXPR to some pointer or reference type TYPE.
   EXPR must be pointer, reference, integer, enumeral, or literal zero;
   in other cases error is called.  If FOLD_P is true, try to fold the
   expression.  */

static tree
convert_to_pointer_1 (tree type, tree expr, bool fold_p)
{
  location_t loc = EXPR_LOCATION (expr);
  if (TREE_TYPE (expr) == type)
    return expr;

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
        /* If the pointers point to different address spaces, conversion
           needs to be done via an ADDR_SPACE_CONVERT_EXPR instead of a
           NOP_EXPR.  */
        addr_space_t to_as = TYPE_ADDR_SPACE (TREE_TYPE (type));
        addr_space_t from_as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));

        if (to_as == from_as)
          return maybe_fold_build1_loc (fold_p, loc, NOP_EXPR, type, expr);
        else
          return maybe_fold_build1_loc (fold_p, loc, ADDR_SPACE_CONVERT_EXPR,
                                        type, expr);
      }

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      {
        /* If the input precision differs from the target pointer type
           precision, first convert the input expression to an integer type
           of the target precision.  Some targets, e.g. VMS, need several
           pointer sizes to coexist so the latter isn't necessarily
           POINTER_SIZE.  */
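        /* For example, converting a 32-bit integer to a 64-bit pointer type
           first widens the value to a 64-bit integer, so the CONVERT_EXPR
           built below never has to change the width of its operand.  */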
        unsigned int pprec = TYPE_PRECISION (type);
        unsigned int eprec = TYPE_PRECISION (TREE_TYPE (expr));

        if (eprec != pprec)
          expr
            = maybe_fold_build1_loc (fold_p, loc, NOP_EXPR,
                                     lang_hooks.types.type_for_size (pprec, 0),
                                     expr);
      }
      return maybe_fold_build1_loc (fold_p, loc, CONVERT_EXPR, type, expr);

    default:
      error ("cannot convert to a pointer type");
      return convert_to_pointer_1 (type, integer_zero_node, fold_p);
    }
}
/* A wrapper around convert_to_pointer_1 that always folds the
   expression.  */

tree
convert_to_pointer (tree type, tree expr)
{
  return convert_to_pointer_1 (type, expr, true);
}
/* A wrapper around convert_to_pointer_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_P.  */

tree
convert_to_pointer_maybe_fold (tree type, tree expr, bool dofold)
{
  return convert_to_pointer_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
}
/* Convert EXPR to some floating-point type TYPE.

   EXPR must be float, fixed-point, integer, or enumeral;
   in other cases error is called.  If FOLD_P is true, try to fold
   the expression.  */

static tree
convert_to_real_1 (tree type, tree expr, bool fold_p)
{
  enum built_in_function fcode = builtin_mathfn_code (expr);
  tree itype = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == COMPOUND_EXPR)
    {
      tree t = convert_to_real_1 (type, TREE_OPERAND (expr, 1), fold_p);
      if (t == TREE_OPERAND (expr, 1))
        return expr;
      return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
                         TREE_OPERAND (expr, 0), t);
    }

  /* Disable until we figure out how to decide whether the functions are
     present in the runtime.  */
  /* Convert (float)sqrt((double)x) where x is float into sqrtf(x).  */
  if (optimize
      && (TYPE_MODE (type) == TYPE_MODE (double_type_node)
          || TYPE_MODE (type) == TYPE_MODE (float_type_node)))
    {
      switch (fcode)
        {
#define CASE_MATHFN(FN) case BUILT_IN_##FN: case BUILT_IN_##FN##L:
          /* The above functions may set errno differently with float
             input or output so this transformation is not safe with
             -fmath-errno.  */
          if (flag_errno_math)
            break;

          /* The above functions are not safe to do this conversion.  */
          if (!flag_unsafe_math_optimizations)
            break;

        CASE_MATHFN (SQRT)
          {
            tree arg0 = strip_float_extensions (CALL_EXPR_ARG (expr, 0));
            tree newtype = type;

            /* We have (outertype)sqrt((innertype)x).  Choose the wider of
               the two as the safe type for the operation.  */
            if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (type))
              newtype = TREE_TYPE (arg0);

            /* We consider converting

                 (T1) sqrtT2 ((T2) exprT3)
               to
                 (T1) sqrtT4 ((T4) exprT3)

               where T1 is TYPE, T2 is ITYPE, T3 is TREE_TYPE (ARG0),
               and T4 is NEWTYPE.  All of those types are floating-point
               types.  T4 (NEWTYPE) should be narrower than T2 (ITYPE).
               This conversion is safe only if P1 >= P2*2+2, where P1 and
               P2 are the precisions of T2 and T4.  See the following URL
               for a reference:
               http://stackoverflow.com/questions/9235456/determining-
               floating-point-square-root  */
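            /* For instance, with T2 == double (P1 = 53) and T4 == float
               (P2 = 24), 53 >= 2*24+2 holds, so (float) sqrt ((double) f)
               computes the same value as sqrtf (f) for every float F.
               With T2 == long double using a 64-bit significand and
               T4 == double (P2 = 53), 64 < 2*53+2, so that narrowing is
               not exact in general.  */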
            if ((fcode == BUILT_IN_SQRT || fcode == BUILT_IN_SQRTL)
                && !flag_unsafe_math_optimizations)
              {
                /* The following conversion is unsafe even if the precision
                   condition below is satisfied:

                   (float) sqrtl ((long double) double_val)
                     -> (float) sqrt (double_val)  */
                if (TYPE_MODE (type) != TYPE_MODE (newtype))
                  break;

                int p1 = REAL_MODE_FORMAT (TYPE_MODE (itype))->p;
                int p2 = REAL_MODE_FORMAT (TYPE_MODE (newtype))->p;
                if (p1 < p2 * 2 + 2)
                  break;
              }

            /* Be careful about integer to fp conversions.
               These may overflow still.  */
            if (FLOAT_TYPE_P (TREE_TYPE (arg0))
                && TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
                && (TYPE_MODE (newtype) == TYPE_MODE (double_type_node)
                    || TYPE_MODE (newtype) == TYPE_MODE (float_type_node)))
              {
                tree fn = mathfn_built_in (newtype, fcode);
                if (fn)
                  {
                    tree arg = convert_to_real_1 (newtype, arg0, fold_p);
                    expr = build_call_expr (fn, 1, arg);
                    if (newtype == type)
                      return expr;
                  }
              }
          }
        default:
          break;
        }
    }
  /* Propagate the cast into the operation.  */
  if (itype != type && FLOAT_TYPE_P (type))
    switch (TREE_CODE (expr))
      {
        /* Convert (float)-x into -(float)x.  This is safe for
           round-to-nearest rounding mode when the inner type is float.  */
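        /* With a directed rounding mode the two forms can disagree: under
           round-toward-positive, (float)-x rounds -x upward, while
           -(float)x rounds x upward and then negates, so the results can
           differ in the last place.  Hence the flag_rounding_math check
           below.  */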
      case ABS_EXPR:
      case NEGATE_EXPR:
        if (!flag_rounding_math
            && FLOAT_TYPE_P (itype)
            && TYPE_PRECISION (type) < TYPE_PRECISION (itype))
          {
            tree arg = convert_to_real_1 (type, TREE_OPERAND (expr, 0),
                                          fold_p);
            return build1 (TREE_CODE (expr), type, arg);
          }
        break;

        /* Convert (outertype)((innertype0)a+(innertype1)b)
           into ((newtype)a+(newtype)b) where newtype
           is the widest mode from all of these.  */
      case PLUS_EXPR:
      case MINUS_EXPR:
      case MULT_EXPR:
      case RDIV_EXPR:
        {
          tree arg0 = strip_float_extensions (TREE_OPERAND (expr, 0));
          tree arg1 = strip_float_extensions (TREE_OPERAND (expr, 1));

          if (FLOAT_TYPE_P (TREE_TYPE (arg0))
              && FLOAT_TYPE_P (TREE_TYPE (arg1))
              && DECIMAL_FLOAT_TYPE_P (itype) == DECIMAL_FLOAT_TYPE_P (type))
            {
              tree newtype = type;

              if (TYPE_MODE (TREE_TYPE (arg0)) == SDmode
                  || TYPE_MODE (TREE_TYPE (arg1)) == SDmode
                  || TYPE_MODE (type) == SDmode)
                newtype = dfloat32_type_node;
              if (TYPE_MODE (TREE_TYPE (arg0)) == DDmode
                  || TYPE_MODE (TREE_TYPE (arg1)) == DDmode
                  || TYPE_MODE (type) == DDmode)
                newtype = dfloat64_type_node;
              if (TYPE_MODE (TREE_TYPE (arg0)) == TDmode
                  || TYPE_MODE (TREE_TYPE (arg1)) == TDmode
                  || TYPE_MODE (type) == TDmode)
                newtype = dfloat128_type_node;
              if (newtype == dfloat32_type_node
                  || newtype == dfloat64_type_node
                  || newtype == dfloat128_type_node)
                {
                  expr = build2 (TREE_CODE (expr), newtype,
                                 convert_to_real_1 (newtype, arg0, fold_p),
                                 convert_to_real_1 (newtype, arg1, fold_p));
                  if (newtype == type)
                    return expr;
                  break;
                }

              if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (newtype))
                newtype = TREE_TYPE (arg0);
              if (TYPE_PRECISION (TREE_TYPE (arg1)) > TYPE_PRECISION (newtype))
                newtype = TREE_TYPE (arg1);
              /* Sometimes this transformation is safe (cannot
                 change results through affecting double rounding
                 cases) and sometimes it is not.  If NEWTYPE is
                 wider than TYPE, e.g. (float)((long double)double
                 + (long double)double) converted to
                 (float)(double + double), the transformation is
                 unsafe regardless of the details of the types
                 involved; double rounding can arise if the result
                 of NEWTYPE arithmetic is a NEWTYPE value half way
                 between two representable TYPE values but the
                 exact value is sufficiently different (in the
                 right direction) for this difference to be
                 visible in ITYPE arithmetic.  If NEWTYPE is the
                 same as TYPE, however, the transformation may be
                 safe depending on the types involved: it is safe
                 if the ITYPE has strictly more than twice as many
                 mantissa bits as TYPE, can represent infinities
                 and NaNs if the TYPE can, and has sufficient
                 exponent range for the product or ratio of two
                 values representable in the TYPE to be within the
                 range of normal values of ITYPE.  */
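              /* Concretely, for TYPE == float (24-bit significand) and
                 ITYPE == double (53-bit significand), 53 is more than twice
                 24, double can represent the same infinities and NaNs, and
                 its exponent range covers any product or ratio of floats,
                 so (float)((double) f1 * (double) f2) may be computed as
                 f1 * f2.  For TYPE == double and ITYPE == long double with
                 a 64-bit significand, 64 is not more than twice 53, so that
                 narrowing is only done with -funsafe-math-optimizations.  */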
              if (TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
                  && (flag_unsafe_math_optimizations
                      || (TYPE_PRECISION (newtype) == TYPE_PRECISION (type)
                          && real_can_shorten_arithmetic (TYPE_MODE (itype),
                                                          TYPE_MODE (type))
                          && !excess_precision_type (newtype))))
                {
                  expr = build2 (TREE_CODE (expr), newtype,
                                 convert_to_real_1 (newtype, arg0, fold_p),
                                 convert_to_real_1 (newtype, arg1, fold_p));
                  if (newtype == type)
                    return expr;
                }
            }
        }
        break;

      default:
        break;
      }
  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case REAL_TYPE:
      /* Ignore the conversion if we don't need to store intermediate
         results and neither type is a decimal float.  */
      return build1_loc (loc,
                         (flag_float_store
                          || DECIMAL_FLOAT_TYPE_P (type)
                          || DECIMAL_FLOAT_TYPE_P (itype))
                         ? CONVERT_EXPR : NOP_EXPR, type, expr);

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return build1 (FLOAT_EXPR, type, expr);

    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      return convert (type,
                      maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
                                             TREE_TYPE (TREE_TYPE (expr)),
                                             expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      error ("pointer value used where a floating point value was expected");
      return convert_to_real_1 (type, integer_zero_node, fold_p);

    default:
      error ("aggregate value used where a float was expected");
      return convert_to_real_1 (type, integer_zero_node, fold_p);
    }
}
/* A wrapper around convert_to_real_1 that always folds the
   expression.  */

tree
convert_to_real (tree type, tree expr)
{
  return convert_to_real_1 (type, expr, true);
}
/* A wrapper around convert_to_real_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_P.  */

tree
convert_to_real_maybe_fold (tree type, tree expr, bool dofold)
{
  return convert_to_real_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
}
/* Try to narrow EX_FORM ARG0 ARG1 in narrowed arg types producing a
   result in TYPE.  */

static tree
do_narrow (location_t loc,
           enum tree_code ex_form, tree type, tree arg0, tree arg1,
           tree expr, unsigned inprec, unsigned outprec, bool dofold)
{
  /* Do the arithmetic in type TYPEX,
     then convert result to TYPE.  */
  tree typex = type;

  /* Can't do arithmetic in enumeral types
     so use an integer type that will hold the values.  */
  if (TREE_CODE (typex) == ENUMERAL_TYPE)
    typex = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
                                            TYPE_UNSIGNED (typex));

  /* But now perhaps TYPEX is as wide as INPREC.
     In that case, do nothing special here.
     (Otherwise we would recurse infinitely in convert.)  */
  if (TYPE_PRECISION (typex) != inprec)
    {
      /* Don't do unsigned arithmetic where signed was wanted,
         or vice versa.
         Exception: if both of the original operands were
         unsigned then we can safely do the work as unsigned.
         Exception: shift operations take their type solely
         from the first argument.
         Exception: the LSHIFT_EXPR case above requires that
         we perform this operation unsigned lest we produce
         signed-overflow undefinedness.
         And we may need to do it as unsigned
         if we truncate to the original size.  */
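      /* For example, narrowing (int) s1 + (int) s2 back to a 16-bit type is
         done as an unsigned 16-bit addition: if the mathematical sum does
         not fit in the signed 16-bit range, unsigned arithmetic still wraps
         with defined behavior, and the low-order bits that survive the
         truncation are the same.  */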
      if (TYPE_UNSIGNED (TREE_TYPE (expr))
          || (TYPE_UNSIGNED (TREE_TYPE (arg0))
              && (TYPE_UNSIGNED (TREE_TYPE (arg1))
                  || ex_form == LSHIFT_EXPR
                  || ex_form == RSHIFT_EXPR
                  || ex_form == LROTATE_EXPR
                  || ex_form == RROTATE_EXPR))
          || ex_form == LSHIFT_EXPR
          /* If we have !flag_wrapv, and either ARG0 or
             ARG1 is of a signed type, we have to do
             PLUS_EXPR, MINUS_EXPR or MULT_EXPR in an unsigned
             type in case the operation in outprec precision
             could overflow.  Otherwise, we would introduce
             signed-overflow undefinedness.  */
          || ((!TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
               || !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
              && ((TYPE_PRECISION (TREE_TYPE (arg0)) * 2u
                   > outprec)
                  || (TYPE_PRECISION (TREE_TYPE (arg1)) * 2u
                      > outprec))
              && (ex_form == PLUS_EXPR
                  || ex_form == MINUS_EXPR
                  || ex_form == MULT_EXPR)))
        {
          if (!TYPE_UNSIGNED (typex))
            typex = unsigned_type_for (typex);
        }
      else
        {
          if (TYPE_UNSIGNED (typex))
            typex = signed_type_for (typex);
        }

      /* We should do away with all this once we have a proper
         type promotion/demotion pass, see PR45397.  */
      expr = maybe_fold_build2_loc (dofold, loc, ex_form, typex,
                                    convert (typex, arg0),
                                    convert (typex, arg1));
      return convert (type, expr);
    }

  return NULL_TREE;
}
/* Convert EXPR to some integer (or enum) type TYPE.

   EXPR must be pointer, integer, discrete (enum, char, or bool), float,
   fixed-point or vector; in other cases error is called.

   If DOFOLD is TRUE, we try to simplify newly-created patterns by folding.

   The result of this is always supposed to be a newly created tree node
   not in use in any existing structure.  */

static tree
convert_to_integer_1 (tree type, tree expr, bool dofold)
{
  enum tree_code ex_form = TREE_CODE (expr);
  tree intype = TREE_TYPE (expr);
  unsigned int inprec = element_precision (intype);
  unsigned int outprec = element_precision (type);
  location_t loc = EXPR_LOCATION (expr);

  /* An INTEGER_TYPE cannot be incomplete, but an ENUMERAL_TYPE can
     be.  Consider `enum E { a, b = (enum E) 3 };'.  */
  if (!COMPLETE_TYPE_P (type))
    {
      error ("conversion to incomplete type");
      return error_mark_node;
    }

  if (ex_form == COMPOUND_EXPR)
    {
      tree t = convert_to_integer_1 (type, TREE_OPERAND (expr, 1), dofold);
      if (t == TREE_OPERAND (expr, 1))
        return expr;
      return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
                         TREE_OPERAND (expr, 0), t);
    }

  /* Convert e.g. (long)round(d) -> lround(d).  */
  /* If we're converting to char, we may encounter differing behavior
     between converting from double->char vs double->long->char.
     We're in "undefined" territory but we prefer to be conservative,
     so only proceed in "unsafe" math mode.  */
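  /* For example, for (char) round (d) the two strategies need not agree:
     if the rounded value fits in long but not in char, the direct
     double->char conversion is undefined, while double->long->char narrows
     an in-range long with implementation-defined behavior.  Hence the
     transformation is only done when the target type is at least as wide
     as long, or under -funsafe-math-optimizations.  */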
  if (optimize
      && (flag_unsafe_math_optimizations
          || (long_integer_type_node
              && outprec >= TYPE_PRECISION (long_integer_type_node))))
    {
      tree s_expr = strip_float_extensions (expr);
      tree s_intype = TREE_TYPE (s_expr);
      const enum built_in_function fcode = builtin_mathfn_code (s_expr);
      tree fn = 0;

      switch (fcode)
        {
        CASE_FLT_FN (BUILT_IN_CEIL):
          /* Only convert in ISO C99 mode.  */
          if (!targetm.libc_has_function (function_c99_misc))
            break;
          if (outprec < TYPE_PRECISION (integer_type_node)
              || (outprec == TYPE_PRECISION (integer_type_node)
                  && !TYPE_UNSIGNED (type)))
            fn = mathfn_built_in (s_intype, BUILT_IN_ICEIL);
          else if (outprec == TYPE_PRECISION (long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LCEIL);
          else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LLCEIL);
          break;

        CASE_FLT_FN (BUILT_IN_FLOOR):
          /* Only convert in ISO C99 mode.  */
          if (!targetm.libc_has_function (function_c99_misc))
            break;
          if (outprec < TYPE_PRECISION (integer_type_node)
              || (outprec == TYPE_PRECISION (integer_type_node)
                  && !TYPE_UNSIGNED (type)))
            fn = mathfn_built_in (s_intype, BUILT_IN_IFLOOR);
          else if (outprec == TYPE_PRECISION (long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LFLOOR);
          else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LLFLOOR);
          break;

        CASE_FLT_FN (BUILT_IN_ROUND):
          /* Only convert in ISO C99 mode and with -fno-math-errno.  */
          if (!targetm.libc_has_function (function_c99_misc)
              || flag_errno_math)
            break;
          if (outprec < TYPE_PRECISION (integer_type_node)
              || (outprec == TYPE_PRECISION (integer_type_node)
                  && !TYPE_UNSIGNED (type)))
            fn = mathfn_built_in (s_intype, BUILT_IN_IROUND);
          else if (outprec == TYPE_PRECISION (long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LROUND);
          else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LLROUND);
          break;

        CASE_FLT_FN (BUILT_IN_NEARBYINT):
          /* Only convert nearbyint* if we can ignore math exceptions.  */
          if (flag_trapping_math)
            break;
          gcc_fallthrough ();
        CASE_FLT_FN (BUILT_IN_RINT):
          /* Only convert in ISO C99 mode and with -fno-math-errno.  */
          if (!targetm.libc_has_function (function_c99_misc)
              || flag_errno_math)
            break;
          if (outprec < TYPE_PRECISION (integer_type_node)
              || (outprec == TYPE_PRECISION (integer_type_node)
                  && !TYPE_UNSIGNED (type)))
            fn = mathfn_built_in (s_intype, BUILT_IN_IRINT);
          else if (outprec == TYPE_PRECISION (long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LRINT);
          else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LLRINT);
          break;

        CASE_FLT_FN (BUILT_IN_TRUNC):
          return convert_to_integer_1 (type, CALL_EXPR_ARG (s_expr, 0),
                                       dofold);

        default:
          break;
        }

      if (fn)
        {
          tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
          return convert_to_integer_1 (type, newexpr, dofold);
        }
    }
  /* Convert (int)logb(d) -> ilogb(d).  */
  if (optimize
      && flag_unsafe_math_optimizations
      && !flag_trapping_math && !flag_errno_math && flag_finite_math_only
      && TREE_CODE (type) == INTEGER_TYPE
      && (outprec > TYPE_PRECISION (integer_type_node)
          || (outprec == TYPE_PRECISION (integer_type_node)
              && !TYPE_UNSIGNED (type))))
    {
      tree s_expr = strip_float_extensions (expr);
      tree s_intype = TREE_TYPE (s_expr);
      const enum built_in_function fcode = builtin_mathfn_code (s_expr);
      tree fn = 0;

      switch (fcode)
        {
        CASE_FLT_FN (BUILT_IN_LOGB):
          fn = mathfn_built_in (s_intype, BUILT_IN_ILOGB);
          break;

        default:
          break;
        }

      if (fn)
        {
          tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
          return convert_to_integer_1 (type, newexpr, dofold);
        }
    }
  switch (TREE_CODE (intype))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      if (integer_zerop (expr))
        return build_int_cst (type, 0);

      /* Convert to an unsigned integer of the correct width first, and from
         there widen/truncate to the required type.  Some targets support the
         coexistence of multiple valid pointer sizes, so fetch the one we need
         from the type.  */
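      /* E.g. narrowing a 64-bit pointer to a 32-bit integer first converts
         the pointer to a 64-bit unsigned integer and then lets fold_convert
         truncate that integer to the requested width.  */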
      if (!dofold)
        return build1 (CONVERT_EXPR, type, expr);
      expr = fold_build1 (CONVERT_EXPR,
                          lang_hooks.types.type_for_size
                            (TYPE_PRECISION (intype), 0),
                          expr);
      return fold_convert (type, expr);

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* If this is a logical operation, which just returns 0 or 1, we can
         change the type of the expression.  */
      if (TREE_CODE_CLASS (ex_form) == tcc_comparison)
        {
          expr = copy_node (expr);
          TREE_TYPE (expr) = type;
          return expr;
        }

      /* If we are widening the type, put in an explicit conversion.
         Similarly if we are not changing the width.  After this, we know
         we are truncating EXPR.  */

      else if (outprec >= inprec)
        {
          enum tree_code code;

          /* If the precision of the EXPR's type is K bits and the
             destination mode has more bits, and the sign is changing,
             it is not safe to use a NOP_EXPR.  For example, suppose
             that EXPR's type is a 3-bit unsigned integer type, the
             TYPE is a 3-bit signed integer type, and the machine mode
             for the types is 8-bit QImode.  In that case, the
             conversion necessitates an explicit sign-extension.  In
             the signed-to-unsigned case the high-order bits have to
             be cleared.  */
          if (TYPE_UNSIGNED (type) != TYPE_UNSIGNED (TREE_TYPE (expr))
              && !type_has_mode_precision_p (TREE_TYPE (expr)))
            code = CONVERT_EXPR;
          else
            code = NOP_EXPR;

          return maybe_fold_build1_loc (dofold, loc, code, type, expr);
        }

      /* If TYPE is an enumeral type or a type with a precision less
         than the number of bits in its mode, do the conversion to the
         type corresponding to its mode, then do a nop conversion
         to TYPE.  */
      else if (TREE_CODE (type) == ENUMERAL_TYPE
               || outprec != GET_MODE_PRECISION (TYPE_MODE (type)))
        {
          expr = convert (lang_hooks.types.type_for_mode
                          (TYPE_MODE (type), TYPE_UNSIGNED (type)), expr);
          return maybe_fold_build1_loc (dofold, loc, NOP_EXPR, type, expr);
        }
      /* Here detect when we can distribute the truncation down past some
         arithmetic.  For example, if adding two longs and converting to an
         int, we can equally well convert both to ints and then add.
         For the operations handled here, such truncation distribution
         is always safe.
         It is desirable in these cases:
         1) when truncating down to full-word from a larger size
         2) when truncating takes no work.
         3) when at least one operand of the arithmetic has been extended
         (as by C's default conversions).  In this case we need two
         conversions if we do the arithmetic as already requested, so we
         might as well truncate both and then combine.  Perhaps that way we
         need only one.

         Note that in general we cannot do the arithmetic in a type
         shorter than the desired result of conversion, even if the operands
         are both extended from a shorter type, because they might overflow
         if combined in that type.  The exceptions to this--the times when
         two narrow values can be combined in their narrow type even to
         make a wider result--are handled by "shorten" in build_binary_op.  */
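      /* E.g. (int) ((long) i1 + (long) i2) can become (int) i1 + (int) i2:
         the truncation keeps only the low-order bits of the sum, and those
         bits do not depend on the discarded high-order bits of the
         operands.  */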
      else
        switch (ex_form)
          {
          case RSHIFT_EXPR:
            /* We can pass truncation down through right shifting
               when the shift count is a nonpositive constant.  */
            if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
                && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) <= 0)
              goto trunc1;
            break;

          case LSHIFT_EXPR:
            /* We can pass truncation down through left shifting
               when the shift count is a nonnegative constant and
               the target type is unsigned.  */
            if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
                && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) >= 0
                && TYPE_UNSIGNED (type)
                && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
              {
                /* If shift count is less than the width of the truncated
                   type, do it in the new type.  */
                if (tree_int_cst_lt (TREE_OPERAND (expr, 1), TYPE_SIZE (type)))
                  /* In this case, shifting is like multiplication.  */
                  goto trunc1;
                else
                  {
                    /* If it is >= that width, result is zero.
                       Handling this with trunc1 would give the wrong result:
                       (int) ((long long) a << 32) is well defined (as 0)
                       but (int) a << 32 is undefined and would get a
                       warning.  */

                    tree t = build_int_cst (type, 0);

                    /* If the original expression had side-effects, we must
                       preserve it.  */
                    if (TREE_SIDE_EFFECTS (expr))
                      return build2 (COMPOUND_EXPR, type, expr, t);
                    else
                      return t;
                  }
              }
            break;
          case TRUNC_DIV_EXPR:
            {
              tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), NULL_TREE);
              tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), NULL_TREE);

              /* Don't distribute unless the output precision is at least as
                 big as the actual inputs and it has the same signedness.  */
              if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
                  && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
                  /* If the signedness of arg0 and arg1 doesn't match, we
                     can't necessarily find a type to compare them in.  */
                  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
                      == TYPE_UNSIGNED (TREE_TYPE (arg1)))
                  /* Do not change the sign of the division.  */
                  && (TYPE_UNSIGNED (TREE_TYPE (expr))
                      == TYPE_UNSIGNED (TREE_TYPE (arg0)))
                  /* Either require unsigned division or a division by
                     a constant that is not -1.  */
                  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
                      || (TREE_CODE (arg1) == INTEGER_CST
                          && !integer_all_onesp (arg1))))
                {
                  tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
                                        expr, inprec, outprec, dofold);
                  if (tem)
                    return tem;
                }
              break;
            }
          case MAX_EXPR:
          case MIN_EXPR:
          case MULT_EXPR:
            {
              tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
              tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

              /* Don't distribute unless the output precision is at least as
                 big as the actual inputs.  Otherwise, the comparison of the
                 truncated values will be wrong.  */
              if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
                  && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
                  /* If the signedness of arg0 and arg1 doesn't match, we
                     can't necessarily find a type to compare them in.  */
                  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
                      == TYPE_UNSIGNED (TREE_TYPE (arg1))))
                goto trunc1;
              break;
            }

          case PLUS_EXPR:
          case MINUS_EXPR:
          case BIT_AND_EXPR:
          case BIT_IOR_EXPR:
          case BIT_XOR_EXPR:
          trunc1:
            {
              tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
              tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

              /* Do not try to narrow operands of pointer subtraction;
                 that will interfere with other folding.  */
              if (ex_form == MINUS_EXPR
                  && CONVERT_EXPR_P (arg0)
                  && CONVERT_EXPR_P (arg1)
                  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
                  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
                break;

              if (outprec >= BITS_PER_WORD
                  || TRULY_NOOP_TRUNCATION (outprec, inprec)
                  || inprec > TYPE_PRECISION (TREE_TYPE (arg0))
                  || inprec > TYPE_PRECISION (TREE_TYPE (arg1)))
                {
                  tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
                                        expr, inprec, outprec, dofold);
                  if (tem)
                    return tem;
                }
            }
            break;
          case NEGATE_EXPR:
          case BIT_NOT_EXPR:
            /* This is not correct for ABS_EXPR,
               since we must test the sign before truncation.  */
            {
              /* Do the arithmetic in type TYPEX,
                 then convert result to TYPE.  */
              tree typex = type;

              /* Can't do arithmetic in enumeral types
                 so use an integer type that will hold the values.  */
              if (TREE_CODE (typex) == ENUMERAL_TYPE)
                typex
                  = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
                                                    TYPE_UNSIGNED (typex));

              if (!TYPE_UNSIGNED (typex))
                typex = unsigned_type_for (typex);
              return convert (type,
                              fold_build1 (ex_form, typex,
                                           convert (typex,
                                                    TREE_OPERAND (expr, 0))));
            }
          CASE_CONVERT:
            /* Don't introduce a "can't convert between vector values of
               different size" error.  */
            if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == VECTOR_TYPE
                && (GET_MODE_SIZE (TYPE_MODE
                                   (TREE_TYPE (TREE_OPERAND (expr, 0))))
                    != GET_MODE_SIZE (TYPE_MODE (type))))
              break;
            /* If truncating after truncating, might as well do all at once.
               If truncating after extending, we may get rid of wasted
               work.  */
            return convert (type, get_unwidened (TREE_OPERAND (expr, 0),
                                                 type));

          case COND_EXPR:
            /* It is sometimes worthwhile to push the narrowing down through
               the conditional, and doing so never loses.  A COND_EXPR may
               have a throw as one operand, which then has void type.  Just
               leave void operands as they are.  */
            return
              fold_build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
                           VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1)))
                           ? TREE_OPERAND (expr, 1)
                           : convert (type, TREE_OPERAND (expr, 1)),
                           VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 2)))
                           ? TREE_OPERAND (expr, 2)
                           : convert (type, TREE_OPERAND (expr, 2)));

          default:
            break;
          }
      /* When parsing long initializers, we might end up with a lot of casts.
         Shorten those.  */
      if (TREE_CODE (expr) == INTEGER_CST)
        return fold_convert (type, expr);
      return build1 (CONVERT_EXPR, type, expr);

    case REAL_TYPE:
      if (sanitize_flags_p (SANITIZE_FLOAT_CAST)
          && current_function_decl != NULL_TREE)
        {
          expr = save_expr (expr);
          tree check = ubsan_instrument_float_cast (loc, type, expr);
          expr = build1 (FIX_TRUNC_EXPR, type, expr);
          if (check == NULL_TREE)
            return expr;
          return maybe_fold_build2_loc (dofold, loc, COMPOUND_EXPR,
                                        TREE_TYPE (expr), check, expr);
        }
      else
        return build1 (FIX_TRUNC_EXPR, type, expr);

    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      expr = maybe_fold_build1_loc (dofold, loc, REALPART_EXPR,
                                    TREE_TYPE (TREE_TYPE (expr)), expr);
      return convert (type, expr);

    case VECTOR_TYPE:
      if (!tree_int_cst_equal (TYPE_SIZE (type),
                               TYPE_SIZE (TREE_TYPE (expr))))
        {
          error ("can%'t convert a vector of type %qT"
                 " to type %qT which has different size",
                 TREE_TYPE (expr), type);
          return error_mark_node;
        }
      return build1 (VIEW_CONVERT_EXPR, type, expr);

    default:
      error ("aggregate value used where an integer was expected");
      return convert (type, integer_zero_node);
    }
}
/* Convert EXPR to some integer (or enum) type TYPE.

   EXPR must be pointer, integer, discrete (enum, char, or bool), float,
   fixed-point or vector; in other cases error is called.

   The result of this is always supposed to be a newly created tree node
   not in use in any existing structure.  */

tree
convert_to_integer (tree type, tree expr)
{
  return convert_to_integer_1 (type, expr, true);
}
/* A wrapper around convert_to_integer_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_P.  */

tree
convert_to_integer_maybe_fold (tree type, tree expr, bool dofold)
{
  return convert_to_integer_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
}
/* Convert EXPR to the complex type TYPE in the usual ways.  If FOLD_P is
   true, try to fold the expression.  */

static tree
convert_to_complex_1 (tree type, tree expr, bool fold_p)
{
  location_t loc = EXPR_LOCATION (expr);
  tree subtype = TREE_TYPE (type);

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return build2 (COMPLEX_EXPR, type, convert (subtype, expr),
                     convert (subtype, integer_zero_node));

    case COMPLEX_TYPE:
      {
        tree elt_type = TREE_TYPE (TREE_TYPE (expr));

        if (TYPE_MAIN_VARIANT (elt_type) == TYPE_MAIN_VARIANT (subtype))
          return expr;
        else if (TREE_CODE (expr) == COMPOUND_EXPR)
          {
            tree t = convert_to_complex_1 (type, TREE_OPERAND (expr, 1),
                                           fold_p);
            if (t == TREE_OPERAND (expr, 1))
              return expr;
            return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR,
                               TREE_TYPE (t), TREE_OPERAND (expr, 0), t);
          }
        else if (TREE_CODE (expr) == COMPLEX_EXPR)
          return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
                                        convert (subtype,
                                                 TREE_OPERAND (expr, 0)),
                                        convert (subtype,
                                                 TREE_OPERAND (expr, 1)));
        else
          {
            expr = save_expr (expr);
            tree realp = maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
                                                TREE_TYPE (TREE_TYPE (expr)),
                                                expr);
            tree imagp = maybe_fold_build1_loc (fold_p, loc, IMAGPART_EXPR,
                                                TREE_TYPE (TREE_TYPE (expr)),
                                                expr);
            return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
                                          convert (subtype, realp),
                                          convert (subtype, imagp));
          }
      }

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      error ("pointer value used where a complex was expected");
      return convert_to_complex_1 (type, integer_zero_node, fold_p);

    default:
      error ("aggregate value used where a complex was expected");
      return convert_to_complex_1 (type, integer_zero_node, fold_p);
    }
}
/* A wrapper around convert_to_complex_1 that always folds the
   expression.  */

tree
convert_to_complex (tree type, tree expr)
{
  return convert_to_complex_1 (type, expr, true);
}
/* A wrapper around convert_to_complex_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_P.  */

tree
convert_to_complex_maybe_fold (tree type, tree expr, bool dofold)
{
  return convert_to_complex_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
}
/* Convert EXPR to the vector type TYPE in the usual ways.  */

tree
convert_to_vector (tree type, tree expr)
{
  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case INTEGER_TYPE:
    case VECTOR_TYPE:
      if (!tree_int_cst_equal (TYPE_SIZE (type),
                               TYPE_SIZE (TREE_TYPE (expr))))
        {
          error ("can%'t convert a value of type %qT"
                 " to vector type %qT which has different size",
                 TREE_TYPE (expr), type);
          return error_mark_node;
        }
      return build1 (VIEW_CONVERT_EXPR, type, expr);

    default:
      error ("can%'t convert value to a vector");
      return error_mark_node;
    }
}
/* Convert EXPR to some fixed-point type TYPE.

   EXPR must be fixed-point, float, integer, or enumeral;
   in other cases error is called.  */

tree
convert_to_fixed (tree type, tree expr)
{
  if (integer_zerop (expr))
    {
      tree fixed_zero_node = build_fixed (type, FCONST0 (TYPE_MODE (type)));
      return fixed_zero_node;
    }
  else if (integer_onep (expr) && ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)))
    {
      tree fixed_one_node = build_fixed (type, FCONST1 (TYPE_MODE (type)));
      return fixed_one_node;
    }

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case FIXED_POINT_TYPE:
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      return convert (type,
                      fold_build1 (REALPART_EXPR,
                                   TREE_TYPE (TREE_TYPE (expr)), expr));

    default:
      error ("aggregate value used where a fixed-point was expected");
      return error_mark_node;
    }
}