gcc/convert.c
/* Utility routines for data type conversion for GCC.
   Copyright (C) 1987-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* These routines are somewhat language-independent utility functions
   intended to be called by the language-specific convert () functions.  */
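/* For example, a front end's convert () typically switches on the TREE_CODE
   of the target type and dispatches to convert_to_integer, convert_to_real,
   convert_to_pointer, convert_to_complex, and so on (the C front end does
   this in c/c-convert.c).  */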
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "convert.h"
#include "langhooks.h"
#include "builtins.h"
#include "ubsan.h"

#define maybe_fold_build1_loc(FOLD_P, LOC, CODE, TYPE, EXPR) \
  ((FOLD_P) ? fold_build1_loc (LOC, CODE, TYPE, EXPR) \
   : build1_loc (LOC, CODE, TYPE, EXPR))
#define maybe_fold_build2_loc(FOLD_P, LOC, CODE, TYPE, EXPR1, EXPR2) \
  ((FOLD_P) ? fold_build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2) \
   : build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2))
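/* Both variants build the same kind of node; the difference is only whether
   fold_buildN_loc gets a chance to simplify it.  For instance,
   maybe_fold_build1_loc (true, loc, NOP_EXPR, type, expr) may hand back an
   already folded (e.g. constant) result, while passing false always creates
   the NOP_EXPR exactly as requested.  */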
/* Convert EXPR to some pointer or reference type TYPE.
   EXPR must be pointer, reference, integer, enumeral, or literal zero;
   in other cases error is called.  If FOLD_P is true, try to fold the
   expression.  */

static tree
convert_to_pointer_1 (tree type, tree expr, bool fold_p)
{
  location_t loc = EXPR_LOCATION (expr);
  if (TREE_TYPE (expr) == type)
    return expr;

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
        /* If the pointers point to different address spaces, conversion needs
           to be done via an ADDR_SPACE_CONVERT_EXPR instead of a NOP_EXPR.  */
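        /* For instance, on targets with named address spaces (such as AVR's
           __flash or the x86 __seg_fs/__seg_gs qualifiers), converting
           between a pointer into such a space and a generic pointer may need
           real code, which the back end emits for ADDR_SPACE_CONVERT_EXPR;
           a plain NOP_EXPR would lose that information.  */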
        addr_space_t to_as = TYPE_ADDR_SPACE (TREE_TYPE (type));
        addr_space_t from_as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));

        if (to_as == from_as)
          return maybe_fold_build1_loc (fold_p, loc, NOP_EXPR, type, expr);
        else
          return maybe_fold_build1_loc (fold_p, loc, ADDR_SPACE_CONVERT_EXPR,
                                        type, expr);
      }

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      {
        /* If the input precision differs from the target pointer type
           precision, first convert the input expression to an integer type of
           the target precision.  Some targets, e.g. VMS, need several pointer
           sizes to coexist so the latter isn't necessarily POINTER_SIZE.  */
        unsigned int pprec = TYPE_PRECISION (type);
        unsigned int eprec = TYPE_PRECISION (TREE_TYPE (expr));

        if (eprec != pprec)
          expr
            = maybe_fold_build1_loc (fold_p, loc, NOP_EXPR,
                                     lang_hooks.types.type_for_size (pprec, 0),
                                     expr);
      }
      return maybe_fold_build1_loc (fold_p, loc, CONVERT_EXPR, type, expr);

    default:
      error ("cannot convert to a pointer type");
      return convert_to_pointer_1 (type, integer_zero_node, fold_p);
    }
}

/* A wrapper around convert_to_pointer_1 that always folds the
   expression.  */

tree
convert_to_pointer (tree type, tree expr)
{
  return convert_to_pointer_1 (type, expr, true);
}

/* A wrapper around convert_to_pointer_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_P.  */

tree
convert_to_pointer_maybe_fold (tree type, tree expr, bool dofold)
{
  return convert_to_pointer_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
}
/* Convert EXPR to some floating-point type TYPE.

   EXPR must be float, fixed-point, integer, or enumeral;
   in other cases error is called.  If FOLD_P is true, try to fold
   the expression.  */

static tree
convert_to_real_1 (tree type, tree expr, bool fold_p)
{
  enum built_in_function fcode = builtin_mathfn_code (expr);
  tree itype = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == COMPOUND_EXPR)
    {
      tree t = convert_to_real_1 (type, TREE_OPERAND (expr, 1), fold_p);
      if (t == TREE_OPERAND (expr, 1))
        return expr;
      return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
                         TREE_OPERAND (expr, 0), t);
    }

  /* Disable until we figure out how to decide whether the functions are
     present at runtime.  */
  /* Convert (float)sqrt((double)x) where x is float into sqrtf(x).  */
  if (optimize
      && (TYPE_MODE (type) == TYPE_MODE (double_type_node)
          || TYPE_MODE (type) == TYPE_MODE (float_type_node)))
    {
      switch (fcode)
        {
#define CASE_MATHFN(FN) case BUILT_IN_##FN: case BUILT_IN_##FN##L:
        CASE_MATHFN (COSH)
        CASE_MATHFN (EXP)
        CASE_MATHFN (EXP10)
        CASE_MATHFN (EXP2)
        CASE_MATHFN (EXPM1)
        CASE_MATHFN (GAMMA)
        CASE_MATHFN (J0)
        CASE_MATHFN (J1)
        CASE_MATHFN (LGAMMA)
        CASE_MATHFN (POW10)
        CASE_MATHFN (SINH)
        CASE_MATHFN (TGAMMA)
        CASE_MATHFN (Y0)
        CASE_MATHFN (Y1)
          /* The above functions may set errno differently with float
             input or output so this transformation is not safe with
             -fmath-errno.  */
          if (flag_errno_math)
            break;
          gcc_fallthrough ();
        CASE_MATHFN (ACOS)
        CASE_MATHFN (ACOSH)
        CASE_MATHFN (ASIN)
        CASE_MATHFN (ASINH)
        CASE_MATHFN (ATAN)
        CASE_MATHFN (ATANH)
        CASE_MATHFN (CBRT)
        CASE_MATHFN (COS)
        CASE_MATHFN (ERF)
        CASE_MATHFN (ERFC)
        CASE_MATHFN (LOG)
        CASE_MATHFN (LOG10)
        CASE_MATHFN (LOG2)
        CASE_MATHFN (LOG1P)
        CASE_MATHFN (SIN)
        CASE_MATHFN (TAN)
        CASE_MATHFN (TANH)
          /* The above functions are not safe to do this conversion.  */
          if (!flag_unsafe_math_optimizations)
            break;
          gcc_fallthrough ();
        CASE_MATHFN (SQRT)
        CASE_MATHFN (FABS)
        CASE_MATHFN (LOGB)
#undef CASE_MATHFN
          {
            tree arg0 = strip_float_extensions (CALL_EXPR_ARG (expr, 0));
            tree newtype = type;

            /* We have (outertype)sqrt((innertype)x).  Choose the wider mode
               of the two as the safe type for the operation.  */
            if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (type))
              newtype = TREE_TYPE (arg0);

            /* We consider converting

                   (T1) sqrtT2 ((T2) exprT3)
               to
                   (T1) sqrtT4 ((T4) exprT3)

               where T1 is TYPE, T2 is ITYPE, T3 is TREE_TYPE (ARG0),
               and T4 is NEWTYPE.  All those types are floating-point types.
               T4 (NEWTYPE) should be narrower than T2 (ITYPE).  This
               conversion is safe only if P1 >= P2*2+2, where P1 and P2 are
               the precisions of T2 and T4.  See the following URL for a
               reference:
               http://stackoverflow.com/questions/9235456/determining-
               floating-point-square-root
               */
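            /* A worked instance of the P1 >= P2*2+2 rule, assuming the usual
               IEEE formats for the target: for (float) sqrt ((double) f)
               with F a float, P1 = 53 (double) and P2 = 24 (float), and
               53 >= 2*24 + 2 = 50, so the call can become sqrtf (f).  For
               (double) sqrtl ((long double) d) with an 80-bit long double,
               P1 = 64 and P2 = 53, and 64 < 2*53 + 2 = 108, so that case is
               rejected by the check below.  */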
            if ((fcode == BUILT_IN_SQRT || fcode == BUILT_IN_SQRTL)
                && !flag_unsafe_math_optimizations)
              {
                /* The following conversion is unsafe even if the precision
                   condition below is satisfied:

                   (float) sqrtl ((long double) double_val)
                     -> (float) sqrt (double_val)
                   */
                if (TYPE_MODE (type) != TYPE_MODE (newtype))
                  break;

                int p1 = REAL_MODE_FORMAT (TYPE_MODE (itype))->p;
                int p2 = REAL_MODE_FORMAT (TYPE_MODE (newtype))->p;
                if (p1 < p2 * 2 + 2)
                  break;
              }

            /* Be careful about integer to fp conversions.
               These may overflow still.  */
            if (FLOAT_TYPE_P (TREE_TYPE (arg0))
                && TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
                && (TYPE_MODE (newtype) == TYPE_MODE (double_type_node)
                    || TYPE_MODE (newtype) == TYPE_MODE (float_type_node)))
              {
                tree fn = mathfn_built_in (newtype, fcode);
                if (fn)
                  {
                    tree arg = convert_to_real_1 (newtype, arg0, fold_p);
                    expr = build_call_expr (fn, 1, arg);
                    if (newtype == type)
                      return expr;
                  }
              }
          }
        default:
          break;
        }
    }

  /* Propagate the cast into the operation.  */
  if (itype != type && FLOAT_TYPE_P (type))
    switch (TREE_CODE (expr))
      {
        /* Convert (float)-x into -(float)x.  This is safe for
           round-to-nearest rounding mode when the inner type is float.  */
      case ABS_EXPR:
      case NEGATE_EXPR:
        if (!flag_rounding_math
            && FLOAT_TYPE_P (itype)
            && TYPE_PRECISION (type) < TYPE_PRECISION (itype))
          {
            tree arg = convert_to_real_1 (type, TREE_OPERAND (expr, 0),
                                          fold_p);
            return build1 (TREE_CODE (expr), type, arg);
          }
        break;
        /* Convert (outertype)((innertype0)a+(innertype1)b)
           into ((newtype)a+(newtype)b) where newtype
           is the widest mode from all of these.  */
      case PLUS_EXPR:
      case MINUS_EXPR:
      case MULT_EXPR:
      case RDIV_EXPR:
        {
          tree arg0 = strip_float_extensions (TREE_OPERAND (expr, 0));
          tree arg1 = strip_float_extensions (TREE_OPERAND (expr, 1));

          if (FLOAT_TYPE_P (TREE_TYPE (arg0))
              && FLOAT_TYPE_P (TREE_TYPE (arg1))
              && DECIMAL_FLOAT_TYPE_P (itype) == DECIMAL_FLOAT_TYPE_P (type))
            {
              tree newtype = type;

              if (TYPE_MODE (TREE_TYPE (arg0)) == SDmode
                  || TYPE_MODE (TREE_TYPE (arg1)) == SDmode
                  || TYPE_MODE (type) == SDmode)
                newtype = dfloat32_type_node;
              if (TYPE_MODE (TREE_TYPE (arg0)) == DDmode
                  || TYPE_MODE (TREE_TYPE (arg1)) == DDmode
                  || TYPE_MODE (type) == DDmode)
                newtype = dfloat64_type_node;
              if (TYPE_MODE (TREE_TYPE (arg0)) == TDmode
                  || TYPE_MODE (TREE_TYPE (arg1)) == TDmode
                  || TYPE_MODE (type) == TDmode)
                newtype = dfloat128_type_node;
              if (newtype == dfloat32_type_node
                  || newtype == dfloat64_type_node
                  || newtype == dfloat128_type_node)
                {
                  expr = build2 (TREE_CODE (expr), newtype,
                                 convert_to_real_1 (newtype, arg0,
                                                    fold_p),
                                 convert_to_real_1 (newtype, arg1,
                                                    fold_p));
                  if (newtype == type)
                    return expr;
                  break;
                }

              if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (newtype))
                newtype = TREE_TYPE (arg0);
              if (TYPE_PRECISION (TREE_TYPE (arg1)) > TYPE_PRECISION (newtype))
                newtype = TREE_TYPE (arg1);
              /* Sometimes this transformation is safe (cannot
                 change results through affecting double rounding
                 cases) and sometimes it is not.  If NEWTYPE is
                 wider than TYPE, e.g. (float)((long double)double
                 + (long double)double) converted to
                 (float)(double + double), the transformation is
                 unsafe regardless of the details of the types
                 involved; double rounding can arise if the result
                 of NEWTYPE arithmetic is a NEWTYPE value half way
                 between two representable TYPE values but the
                 exact value is sufficiently different (in the
                 right direction) for this difference to be
                 visible in ITYPE arithmetic.  If NEWTYPE is the
                 same as TYPE, however, the transformation may be
                 safe depending on the types involved: it is safe
                 if the ITYPE has strictly more than twice as many
                 mantissa bits as TYPE, can represent infinities
                 and NaNs if the TYPE can, and has sufficient
                 exponent range for the product or ratio of two
                 values representable in the TYPE to be within the
                 range of normal values of ITYPE.  */
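              /* Concretely, assuming IEEE binary32/binary64: binary64 has a
                 53-bit mantissa, strictly more than twice the 24 bits of
                 binary32, so a single float +, -, * or / evaluated in double
                 and then converted back to float matches the directly
                 computed float result; real_can_shorten_arithmetic below
                 checks this property for the target's actual modes.  */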
              if (TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
                  && (flag_unsafe_math_optimizations
                      || (TYPE_PRECISION (newtype) == TYPE_PRECISION (type)
                          && real_can_shorten_arithmetic (TYPE_MODE (itype),
                                                          TYPE_MODE (type))
                          && !excess_precision_type (newtype))))
                {
                  expr = build2 (TREE_CODE (expr), newtype,
                                 convert_to_real_1 (newtype, arg0,
                                                    fold_p),
                                 convert_to_real_1 (newtype, arg1,
                                                    fold_p));
                  if (newtype == type)
                    return expr;
                }
            }
        }
        break;

      default:
        break;
      }

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case REAL_TYPE:
      /* Ignore the conversion if we don't need to store intermediate
         results and neither type is a decimal float.  */
      return build1_loc (loc,
                         (flag_float_store
                          || DECIMAL_FLOAT_TYPE_P (type)
                          || DECIMAL_FLOAT_TYPE_P (itype))
                         ? CONVERT_EXPR : NOP_EXPR, type, expr);

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return build1 (FLOAT_EXPR, type, expr);

    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      return convert (type,
                      maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
                                             TREE_TYPE (TREE_TYPE (expr)),
                                             expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      error ("pointer value used where a floating point value was expected");
      return convert_to_real_1 (type, integer_zero_node, fold_p);

    default:
      error ("aggregate value used where a float was expected");
      return convert_to_real_1 (type, integer_zero_node, fold_p);
    }
}

/* A wrapper around convert_to_real_1 that always folds the
   expression.  */

tree
convert_to_real (tree type, tree expr)
{
  return convert_to_real_1 (type, expr, true);
}

/* A wrapper around convert_to_real_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_P.  */

tree
convert_to_real_maybe_fold (tree type, tree expr, bool dofold)
{
  return convert_to_real_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
}
/* Convert EXPR to some integer (or enum) type TYPE.

   EXPR must be pointer, integer, discrete (enum, char, or bool), float,
   fixed-point or vector; in other cases error is called.

   If DOFOLD is TRUE, we try to simplify newly-created patterns by folding.

   The result of this is always supposed to be a newly created tree node
   not in use in any existing structure.  */

static tree
convert_to_integer_1 (tree type, tree expr, bool dofold)
{
  enum tree_code ex_form = TREE_CODE (expr);
  tree intype = TREE_TYPE (expr);
  unsigned int inprec = element_precision (intype);
  unsigned int outprec = element_precision (type);
  location_t loc = EXPR_LOCATION (expr);

  /* An INTEGER_TYPE cannot be incomplete, but an ENUMERAL_TYPE can
     be.  Consider `enum E { a, b = (enum E) 3 };'.  */
  if (!COMPLETE_TYPE_P (type))
    {
      error ("conversion to incomplete type");
      return error_mark_node;
    }

  if (ex_form == COMPOUND_EXPR)
    {
      tree t = convert_to_integer_1 (type, TREE_OPERAND (expr, 1), dofold);
      if (t == TREE_OPERAND (expr, 1))
        return expr;
      return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
                         TREE_OPERAND (expr, 0), t);
    }

  /* Convert e.g. (long)round(d) -> lround(d).  */
  /* If we're converting to char, we may encounter differing behavior
     between converting from double->char vs double->long->char.
     We're in "undefined" territory but we prefer to be conservative,
     so only proceed in "unsafe" math mode.  */
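  /* For instance, on an LP64 target (long has 64 bits), converting
     (long) round (d) is eligible for the lround (d) transformation below,
     while (int) round (d) is attempted only under
     -funsafe-math-optimizations, since outprec (32) is smaller than the
     precision of long and the double->int vs double->long->int behavior
     could differ.  (Illustrative assumption about the target's type
     sizes.)  */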
  if (optimize
      && (flag_unsafe_math_optimizations
          || (long_integer_type_node
              && outprec >= TYPE_PRECISION (long_integer_type_node))))
    {
      tree s_expr = strip_float_extensions (expr);
      tree s_intype = TREE_TYPE (s_expr);
      const enum built_in_function fcode = builtin_mathfn_code (s_expr);
      tree fn = 0;

      switch (fcode)
        {
        CASE_FLT_FN (BUILT_IN_CEIL):
          /* Only convert in ISO C99 mode.  */
          if (!targetm.libc_has_function (function_c99_misc))
            break;
          if (outprec < TYPE_PRECISION (integer_type_node)
              || (outprec == TYPE_PRECISION (integer_type_node)
                  && !TYPE_UNSIGNED (type)))
            fn = mathfn_built_in (s_intype, BUILT_IN_ICEIL);
          else if (outprec == TYPE_PRECISION (long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LCEIL);
          else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LLCEIL);
          break;

        CASE_FLT_FN (BUILT_IN_FLOOR):
          /* Only convert in ISO C99 mode.  */
          if (!targetm.libc_has_function (function_c99_misc))
            break;
          if (outprec < TYPE_PRECISION (integer_type_node)
              || (outprec == TYPE_PRECISION (integer_type_node)
                  && !TYPE_UNSIGNED (type)))
            fn = mathfn_built_in (s_intype, BUILT_IN_IFLOOR);
          else if (outprec == TYPE_PRECISION (long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LFLOOR);
          else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LLFLOOR);
          break;

        CASE_FLT_FN (BUILT_IN_ROUND):
          /* Only convert in ISO C99 mode and with -fno-math-errno.  */
          if (!targetm.libc_has_function (function_c99_misc) || flag_errno_math)
            break;
          if (outprec < TYPE_PRECISION (integer_type_node)
              || (outprec == TYPE_PRECISION (integer_type_node)
                  && !TYPE_UNSIGNED (type)))
            fn = mathfn_built_in (s_intype, BUILT_IN_IROUND);
          else if (outprec == TYPE_PRECISION (long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LROUND);
          else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LLROUND);
          break;

        CASE_FLT_FN (BUILT_IN_NEARBYINT):
          /* Only convert nearbyint* if we can ignore math exceptions.  */
          if (flag_trapping_math)
            break;
          gcc_fallthrough ();
        CASE_FLT_FN (BUILT_IN_RINT):
          /* Only convert in ISO C99 mode and with -fno-math-errno.  */
          if (!targetm.libc_has_function (function_c99_misc) || flag_errno_math)
            break;
          if (outprec < TYPE_PRECISION (integer_type_node)
              || (outprec == TYPE_PRECISION (integer_type_node)
                  && !TYPE_UNSIGNED (type)))
            fn = mathfn_built_in (s_intype, BUILT_IN_IRINT);
          else if (outprec == TYPE_PRECISION (long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LRINT);
          else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LLRINT);
          break;

        CASE_FLT_FN (BUILT_IN_TRUNC):
          return convert_to_integer_1 (type, CALL_EXPR_ARG (s_expr, 0), dofold);

        default:
          break;
        }

      if (fn)
        {
          tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
          return convert_to_integer_1 (type, newexpr, dofold);
        }
    }

  /* Convert (int)logb(d) -> ilogb(d).  */
  if (optimize
      && flag_unsafe_math_optimizations
      && !flag_trapping_math && !flag_errno_math && flag_finite_math_only
      && integer_type_node
      && (outprec > TYPE_PRECISION (integer_type_node)
          || (outprec == TYPE_PRECISION (integer_type_node)
              && !TYPE_UNSIGNED (type))))
    {
      tree s_expr = strip_float_extensions (expr);
      tree s_intype = TREE_TYPE (s_expr);
      const enum built_in_function fcode = builtin_mathfn_code (s_expr);
      tree fn = 0;

      switch (fcode)
        {
        CASE_FLT_FN (BUILT_IN_LOGB):
          fn = mathfn_built_in (s_intype, BUILT_IN_ILOGB);
          break;

        default:
          break;
        }

      if (fn)
        {
          tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
          return convert_to_integer_1 (type, newexpr, dofold);
        }
    }

  switch (TREE_CODE (intype))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      if (integer_zerop (expr))
        return build_int_cst (type, 0);

      /* Convert to an unsigned integer of the correct width first, and from
         there widen/truncate to the required type.  Some targets support the
         coexistence of multiple valid pointer sizes, so fetch the one we need
         from the type.  */
      if (!dofold)
        return build1 (CONVERT_EXPR, type, expr);
      expr = fold_build1 (CONVERT_EXPR,
                          lang_hooks.types.type_for_size
                            (TYPE_PRECISION (intype), 0),
                          expr);
      return fold_convert (type, expr);

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      /* If this is a logical operation, which just returns 0 or 1, we can
         change the type of the expression.  */

      if (TREE_CODE_CLASS (ex_form) == tcc_comparison)
        {
          expr = copy_node (expr);
          TREE_TYPE (expr) = type;
          return expr;
        }

      /* If we are widening the type, put in an explicit conversion.
         Similarly if we are not changing the width.  After this, we know
         we are truncating EXPR.  */

      else if (outprec >= inprec)
        {
          enum tree_code code;

          /* If the precision of the EXPR's type is K bits and the
             destination mode has more bits, and the sign is changing,
             it is not safe to use a NOP_EXPR.  For example, suppose
             that EXPR's type is a 3-bit unsigned integer type, the
             TYPE is a 3-bit signed integer type, and the machine mode
             for the types is 8-bit QImode.  In that case, the
             conversion necessitates an explicit sign-extension.  In
             the signed-to-unsigned case the high-order bits have to
             be cleared.  */
          if (TYPE_UNSIGNED (type) != TYPE_UNSIGNED (TREE_TYPE (expr))
              && (TYPE_PRECISION (TREE_TYPE (expr))
                  != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (expr)))))
            code = CONVERT_EXPR;
          else
            code = NOP_EXPR;

          return maybe_fold_build1_loc (dofold, loc, code, type, expr);
        }

      /* If TYPE is an enumeral type or a type with a precision less
         than the number of bits in its mode, do the conversion to the
         type corresponding to its mode, then do a nop conversion
         to TYPE.  */
      else if (TREE_CODE (type) == ENUMERAL_TYPE
               || outprec != GET_MODE_PRECISION (TYPE_MODE (type)))
        return build1 (NOP_EXPR, type,
                       convert (lang_hooks.types.type_for_mode
                                (TYPE_MODE (type), TYPE_UNSIGNED (type)),
                                expr));

      /* Here detect when we can distribute the truncation down past some
         arithmetic.  For example, if adding two longs and converting to an
         int, we can equally well convert both to ints and then add.
         For the operations handled here, such truncation distribution
         is always safe.
         It is desirable in these cases:
         1) when truncating down to full-word from a larger size
         2) when truncating takes no work.
         3) when at least one operand of the arithmetic has been extended
         (as by C's default conversions).  In this case we need two conversions
         if we do the arithmetic as already requested, so we might as well
         truncate both and then combine.  Perhaps that way we need only one.

         Note that in general we cannot do the arithmetic in a type
         shorter than the desired result of conversion, even if the operands
         are both extended from a shorter type, because they might overflow
         if combined in that type.  The exceptions to this--the times when
         two narrow values can be combined in their narrow type even to
         make a wider result--are handled by "shorten" in build_binary_op.  */
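      /* As a concrete illustration (assuming a typical target where
         truncating int to short is a no-op subreg and signed overflow is
         undefined): for (short) (x + y) with int operands, the code below
         performs the addition as unsigned short + unsigned short and then
         converts the result to short, avoiding both the wider addition and
         any newly introduced signed overflow.  */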
      if (dofold)
        switch (ex_form)
          {
          case RSHIFT_EXPR:
            /* We can pass truncation down through right shifting
               when the shift count is a nonpositive constant.  */
            if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
                && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) <= 0)
              goto trunc1;
            break;

          case LSHIFT_EXPR:
            /* We can pass truncation down through left shifting
               when the shift count is a nonnegative constant and
               the target type is unsigned.  */
            if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
                && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) >= 0
                && TYPE_UNSIGNED (type)
                && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
              {
                /* If shift count is less than the width of the truncated type,
                   really shift.  */
                if (tree_int_cst_lt (TREE_OPERAND (expr, 1), TYPE_SIZE (type)))
                  /* In this case, shifting is like multiplication.  */
                  goto trunc1;
                else
                  {
                    /* If it is >= that width, result is zero.
                       Handling this with trunc1 would give the wrong result:
                       (int) ((long long) a << 32) is well defined (as 0)
                       but (int) a << 32 is undefined and would get a
                       warning.  */

                    tree t = build_int_cst (type, 0);

                    /* If the original expression had side-effects, we must
                       preserve it.  */
                    if (TREE_SIDE_EFFECTS (expr))
                      return build2 (COMPOUND_EXPR, type, expr, t);
                    else
                      return t;
                  }
              }
            break;

          case TRUNC_DIV_EXPR:
            {
              tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
              tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

              /* Don't distribute unless the output precision is at least as
                 big as the actual inputs and it has the same signedness.  */
              if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
                  && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
                  /* If signedness of arg0 and arg1 don't match,
                     we can't necessarily find a type to compare them in.  */
                  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
                      == TYPE_UNSIGNED (TREE_TYPE (arg1)))
                  /* Do not change the sign of the division.  */
                  && (TYPE_UNSIGNED (TREE_TYPE (expr))
                      == TYPE_UNSIGNED (TREE_TYPE (arg0)))
                  /* Either require unsigned division or a division by
                     a constant that is not -1.  */
                  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
                      || (TREE_CODE (arg1) == INTEGER_CST
                          && !integer_all_onesp (arg1))))
                goto trunc1;
              break;
            }

          case MAX_EXPR:
          case MIN_EXPR:
          case MULT_EXPR:
            {
              tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
              tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

              /* Don't distribute unless the output precision is at least as
                 big as the actual inputs.  Otherwise, the comparison of the
                 truncated values will be wrong.  */
              if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
                  && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
                  /* If signedness of arg0 and arg1 don't match,
                     we can't necessarily find a type to compare them in.  */
                  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
                      == TYPE_UNSIGNED (TREE_TYPE (arg1))))
                goto trunc1;
              break;
            }

          case PLUS_EXPR:
          case MINUS_EXPR:
          case BIT_AND_EXPR:
          case BIT_IOR_EXPR:
          case BIT_XOR_EXPR:
          trunc1:
            {
              tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
              tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

              /* Do not try to narrow operands of pointer subtraction;
                 that will interfere with other folding.  */
              if (ex_form == MINUS_EXPR
                  && CONVERT_EXPR_P (arg0)
                  && CONVERT_EXPR_P (arg1)
                  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
                  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
                break;

              if (outprec >= BITS_PER_WORD
                  || TRULY_NOOP_TRUNCATION (outprec, inprec)
                  || inprec > TYPE_PRECISION (TREE_TYPE (arg0))
                  || inprec > TYPE_PRECISION (TREE_TYPE (arg1)))
                {
                  /* Do the arithmetic in type TYPEX,
                     then convert result to TYPE.  */
                  tree typex = type;

                  /* Can't do arithmetic in enumeral types
                     so use an integer type that will hold the values.  */
                  if (TREE_CODE (typex) == ENUMERAL_TYPE)
                    typex
                      = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
                                                        TYPE_UNSIGNED (typex));

                  /* But now perhaps TYPEX is as wide as INPREC.
                     In that case, do nothing special here.
                     (Otherwise we would recurse infinitely in convert.)  */
                  if (TYPE_PRECISION (typex) != inprec)
                    {
                      /* Don't do unsigned arithmetic where signed was wanted,
                         or vice versa.
                         Exception: if both of the original operands were
                         unsigned then we can safely do the work as unsigned.
                         Exception: shift operations take their type solely
                         from the first argument.
                         Exception: the LSHIFT_EXPR case above requires that
                         we perform this operation unsigned lest we produce
                         signed-overflow undefinedness.
                         And we may need to do it as unsigned
                         if we truncate to the original size.  */
                      if (TYPE_UNSIGNED (TREE_TYPE (expr))
                          || (TYPE_UNSIGNED (TREE_TYPE (arg0))
                              && (TYPE_UNSIGNED (TREE_TYPE (arg1))
                                  || ex_form == LSHIFT_EXPR
                                  || ex_form == RSHIFT_EXPR
                                  || ex_form == LROTATE_EXPR
                                  || ex_form == RROTATE_EXPR))
                          || ex_form == LSHIFT_EXPR
                          /* If we have !flag_wrapv, and either ARG0 or
                             ARG1 is of a signed type, we have to do
                             PLUS_EXPR, MINUS_EXPR or MULT_EXPR in an unsigned
                             type in case the operation in outprec precision
                             could overflow.  Otherwise, we would introduce
                             signed-overflow undefinedness.  */
                          || ((!TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
                               || !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
                              && ((TYPE_PRECISION (TREE_TYPE (arg0)) * 2u
                                   > outprec)
                                  || (TYPE_PRECISION (TREE_TYPE (arg1)) * 2u
                                      > outprec))
                              && (ex_form == PLUS_EXPR
                                  || ex_form == MINUS_EXPR
                                  || ex_form == MULT_EXPR)))
                        {
                          if (!TYPE_UNSIGNED (typex))
                            typex = unsigned_type_for (typex);
                        }
                      else
                        {
                          if (TYPE_UNSIGNED (typex))
                            typex = signed_type_for (typex);
                        }
                      /* We should do away with all this once we have a proper
                         type promotion/demotion pass, see PR45397.  */
                      expr = maybe_fold_build2_loc (dofold, loc, ex_form, typex,
                                                    convert (typex, arg0),
                                                    convert (typex, arg1));
                      return convert (type, expr);
                    }
                }
            }
            break;

          case NEGATE_EXPR:
          case BIT_NOT_EXPR:
            /* This is not correct for ABS_EXPR,
               since we must test the sign before truncation.  */
            {
              /* Do the arithmetic in type TYPEX,
                 then convert result to TYPE.  */
              tree typex = type;

              /* Can't do arithmetic in enumeral types
                 so use an integer type that will hold the values.  */
              if (TREE_CODE (typex) == ENUMERAL_TYPE)
                typex
                  = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
                                                    TYPE_UNSIGNED (typex));

              if (!TYPE_UNSIGNED (typex))
                typex = unsigned_type_for (typex);
              return convert (type,
                              fold_build1 (ex_form, typex,
                                           convert (typex,
                                                    TREE_OPERAND (expr, 0))));
            }

          CASE_CONVERT:
            /* Don't introduce a "can't convert between vector values of
               different size" error.  */
            if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == VECTOR_TYPE
                && (GET_MODE_SIZE (TYPE_MODE
                                   (TREE_TYPE (TREE_OPERAND (expr, 0))))
                    != GET_MODE_SIZE (TYPE_MODE (type))))
              break;
            /* If truncating after truncating, might as well do all at once.
               If truncating after extending, we may get rid of wasted work.  */
            return convert (type, get_unwidened (TREE_OPERAND (expr, 0), type));

          case COND_EXPR:
            /* It is sometimes worthwhile to push the narrowing down through
               the conditional, and it never loses.  A COND_EXPR may have a
               throw as one operand, which then has void type.  Just leave
               void operands as they are.  */
            return
              fold_build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
                           VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1)))
                           ? TREE_OPERAND (expr, 1)
                           : convert (type, TREE_OPERAND (expr, 1)),
                           VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 2)))
                           ? TREE_OPERAND (expr, 2)
                           : convert (type, TREE_OPERAND (expr, 2)));

          default:
            break;
          }

      /* When parsing long initializers, we might end up with a lot of casts.
         Shortcut this.  */
      if (TREE_CODE (expr) == INTEGER_CST)
        return fold_convert (type, expr);
      return build1 (CONVERT_EXPR, type, expr);

    case REAL_TYPE:
      if (flag_sanitize & SANITIZE_FLOAT_CAST
          && do_ubsan_in_current_function ())
        {
          expr = save_expr (expr);
          tree check = ubsan_instrument_float_cast (loc, type, expr);
          expr = build1 (FIX_TRUNC_EXPR, type, expr);
          if (check == NULL_TREE)
            return expr;
          return maybe_fold_build2_loc (dofold, loc, COMPOUND_EXPR,
                                        TREE_TYPE (expr), check, expr);
        }
      else
        return build1 (FIX_TRUNC_EXPR, type, expr);

    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      expr = maybe_fold_build1_loc (dofold, loc, REALPART_EXPR,
                                    TREE_TYPE (TREE_TYPE (expr)), expr);
      return convert (type, expr);

    case VECTOR_TYPE:
      if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
        {
          error ("can%'t convert a vector of type %qT"
                 " to type %qT which has different size",
                 TREE_TYPE (expr), type);
          return error_mark_node;
        }
      return build1 (VIEW_CONVERT_EXPR, type, expr);

    default:
      error ("aggregate value used where an integer was expected");
      return convert (type, integer_zero_node);
    }
}

/* Convert EXPR to some integer (or enum) type TYPE.

   EXPR must be pointer, integer, discrete (enum, char, or bool), float,
   fixed-point or vector; in other cases error is called.

   The result of this is always supposed to be a newly created tree node
   not in use in any existing structure.  */

tree
convert_to_integer (tree type, tree expr)
{
  return convert_to_integer_1 (type, expr, true);
}

/* A wrapper around convert_to_integer_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_P.  */

tree
convert_to_integer_maybe_fold (tree type, tree expr, bool dofold)
{
  return convert_to_integer_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
}
/* Convert EXPR to the complex type TYPE in the usual ways.  If FOLD_P is
   true, try to fold the expression.  */

static tree
convert_to_complex_1 (tree type, tree expr, bool fold_p)
{
  location_t loc = EXPR_LOCATION (expr);
  tree subtype = TREE_TYPE (type);

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return build2 (COMPLEX_EXPR, type, convert (subtype, expr),
                     convert (subtype, integer_zero_node));

    case COMPLEX_TYPE:
      {
        tree elt_type = TREE_TYPE (TREE_TYPE (expr));

        if (TYPE_MAIN_VARIANT (elt_type) == TYPE_MAIN_VARIANT (subtype))
          return expr;
        else if (TREE_CODE (expr) == COMPOUND_EXPR)
          {
            tree t = convert_to_complex_1 (type, TREE_OPERAND (expr, 1),
                                           fold_p);
            if (t == TREE_OPERAND (expr, 1))
              return expr;
            return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR,
                               TREE_TYPE (t), TREE_OPERAND (expr, 0), t);
          }
        else if (TREE_CODE (expr) == COMPLEX_EXPR)
          return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
                                        convert (subtype,
                                                 TREE_OPERAND (expr, 0)),
                                        convert (subtype,
                                                 TREE_OPERAND (expr, 1)));
        else
          {
            expr = save_expr (expr);
            tree realp = maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
                                                TREE_TYPE (TREE_TYPE (expr)),
                                                expr);
            tree imagp = maybe_fold_build1_loc (fold_p, loc, IMAGPART_EXPR,
                                                TREE_TYPE (TREE_TYPE (expr)),
                                                expr);
            return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
                                          convert (subtype, realp),
                                          convert (subtype, imagp));
          }
      }

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      error ("pointer value used where a complex was expected");
      return convert_to_complex_1 (type, integer_zero_node, fold_p);

    default:
      error ("aggregate value used where a complex was expected");
      return convert_to_complex_1 (type, integer_zero_node, fold_p);
    }
}

/* A wrapper around convert_to_complex_1 that always folds the
   expression.  */

tree
convert_to_complex (tree type, tree expr)
{
  return convert_to_complex_1 (type, expr, true);
}

/* A wrapper around convert_to_complex_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_P.  */

tree
convert_to_complex_maybe_fold (tree type, tree expr, bool dofold)
{
  return convert_to_complex_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
}

/* Convert EXPR to the vector type TYPE in the usual ways.  */
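/* For example, a V4SI value (four 32-bit integers, 16 bytes) can be viewed
   as V16QI or V8HI because the total size matches, whereas converting it to
   an 8-byte vector type is diagnosed below.  (Illustrative; the available
   vector modes depend on the target.)  */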
tree
convert_to_vector (tree type, tree expr)
{
  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case INTEGER_TYPE:
    case VECTOR_TYPE:
      if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
        {
          error ("can%'t convert a value of type %qT"
                 " to vector type %qT which has different size",
                 TREE_TYPE (expr), type);
          return error_mark_node;
        }
      return build1 (VIEW_CONVERT_EXPR, type, expr);

    default:
      error ("can%'t convert value to a vector");
      return error_mark_node;
    }
}

/* Convert EXPR to some fixed-point type TYPE.

   EXPR must be fixed-point, float, integer, or enumeral;
   in other cases error is called.  */

tree
convert_to_fixed (tree type, tree expr)
{
  if (integer_zerop (expr))
    {
      tree fixed_zero_node = build_fixed (type, FCONST0 (TYPE_MODE (type)));
      return fixed_zero_node;
    }
  else if (integer_onep (expr) && ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)))
    {
      tree fixed_one_node = build_fixed (type, FCONST1 (TYPE_MODE (type)));
      return fixed_one_node;
    }

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case FIXED_POINT_TYPE:
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      return convert (type,
                      fold_build1 (REALPART_EXPR,
                                   TREE_TYPE (TREE_TYPE (expr)), expr));

    default:
      error ("aggregate value used where a fixed-point was expected");
      return error_mark_node;
    }
}