gcc/convert.cc
/* Utility routines for data type conversion for GCC.
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* These routines are somewhat language-independent utility functions
   intended to be called by the language-specific convert () functions.  */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "target.h"
28 #include "tree.h"
29 #include "diagnostic-core.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "convert.h"
33 #include "langhooks.h"
34 #include "builtins.h"
35 #include "ubsan.h"
36 #include "stringpool.h"
37 #include "attribs.h"
38 #include "asan.h"
39 #include "selftest.h"

#define maybe_fold_build1_loc(FOLD_P, LOC, CODE, TYPE, EXPR) \
  ((FOLD_P) ? fold_build1_loc (LOC, CODE, TYPE, EXPR) \
	    : build1_loc (LOC, CODE, TYPE, EXPR))
#define maybe_fold_build2_loc(FOLD_P, LOC, CODE, TYPE, EXPR1, EXPR2) \
  ((FOLD_P) ? fold_build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2) \
	    : build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2))
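
/* For illustration: with FOLD_P true these macros defer to
   fold_build1_loc/fold_build2_loc, so the tree they return may already be
   simplified (e.g. a folded constant); with FOLD_P false they call
   build1_loc/build2_loc and always return a freshly built, unfolded node
   carrying the given location.  */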

/* Convert EXPR to some pointer or reference type TYPE.
   EXPR must be pointer, reference, integer, enumeral, or literal zero;
   in other cases error is called.  If FOLD_P is true, try to fold the
   expression.  */

static tree
convert_to_pointer_1 (tree type, tree expr, bool fold_p)
{
  location_t loc = EXPR_LOCATION (expr);
  if (TREE_TYPE (expr) == type)
    return expr;

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	/* If the pointers point to different address spaces, conversion needs
	   to be done via an ADDR_SPACE_CONVERT_EXPR instead of a NOP_EXPR.  */
	addr_space_t to_as = TYPE_ADDR_SPACE (TREE_TYPE (type));
	addr_space_t from_as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));

	if (to_as == from_as)
	  return maybe_fold_build1_loc (fold_p, loc, NOP_EXPR, type, expr);
	else
	  return maybe_fold_build1_loc (fold_p, loc, ADDR_SPACE_CONVERT_EXPR,
					type, expr);
      }

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case BITINT_TYPE:
      {
	/* If the input precision differs from the target pointer type
	   precision, first convert the input expression to an integer type of
	   the target precision.  Some targets, e.g. VMS, need several pointer
	   sizes to coexist so the latter isn't necessarily POINTER_SIZE.  */
	unsigned int pprec = TYPE_PRECISION (type);
	unsigned int eprec = TYPE_PRECISION (TREE_TYPE (expr));

	if (eprec != pprec)
	  expr
	    = maybe_fold_build1_loc (fold_p, loc, NOP_EXPR,
				     lang_hooks.types.type_for_size (pprec, 0),
				     expr);

	return maybe_fold_build1_loc (fold_p, loc, CONVERT_EXPR, type, expr);
      }

    default:
      error ("cannot convert to a pointer type");
      return error_mark_node;
    }
}
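
/* For illustration: converting a 32-bit int to a 64-bit pointer type goes
   through the INTEGER_TYPE case above, which first widens the operand to a
   64-bit integer type and only then builds the CONVERT_EXPR to the pointer
   type, so the integer-to-pointer conversion itself never changes width.  */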

/* Subroutine of the various convert_to_*_maybe_fold routines.

   If a location wrapper has been folded to a constant (presumably of
   a different type), re-wrap the new constant with a location wrapper.  */

tree
preserve_any_location_wrapper (tree result, tree orig_expr)
{
  if (CONSTANT_CLASS_P (result) && location_wrapper_p (orig_expr))
    {
      if (result == TREE_OPERAND (orig_expr, 0))
	return orig_expr;
      else
	return maybe_wrap_with_location (result, EXPR_LOCATION (orig_expr));
    }

  return result;
}

/* A wrapper around convert_to_pointer_1 that always folds the
   expression.  */

tree
convert_to_pointer (tree type, tree expr)
{
  return convert_to_pointer_1 (type, expr, true);
}

/* A wrapper around convert_to_pointer_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */

tree
convert_to_pointer_maybe_fold (tree type, tree expr, bool dofold)
{
  tree result
    = convert_to_pointer_1 (type, expr,
			    dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
  return preserve_any_location_wrapper (result, expr);
}

/* Convert EXPR to some floating-point type TYPE.

   EXPR must be float, fixed-point, integer, or enumeral;
   in other cases error is called.  If FOLD_P is true, try to fold
   the expression.  */

static tree
convert_to_real_1 (tree type, tree expr, bool fold_p)
{
  enum built_in_function fcode = builtin_mathfn_code (expr);
  tree itype = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == COMPOUND_EXPR)
    {
      tree t = convert_to_real_1 (type, TREE_OPERAND (expr, 1), fold_p);
      if (t == TREE_OPERAND (expr, 1))
	return expr;
      return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
			 TREE_OPERAND (expr, 0), t);
    }

  /* Disable until we figure out how to decide whether the functions are
     present in the runtime.  */
  /* Convert (float)sqrt((double)x) where x is float into sqrtf(x).  */
  if (optimize
      && (TYPE_MODE (type) == TYPE_MODE (double_type_node)
	  || TYPE_MODE (type) == TYPE_MODE (float_type_node)))
    {
      switch (fcode)
	{
#define CASE_MATHFN(FN) case BUILT_IN_##FN: case BUILT_IN_##FN##L:
	CASE_MATHFN (COSH)
	CASE_MATHFN (EXP)
	CASE_MATHFN (EXP10)
	CASE_MATHFN (EXP2)
	CASE_MATHFN (EXPM1)
	CASE_MATHFN (GAMMA)
	CASE_MATHFN (J0)
	CASE_MATHFN (J1)
	CASE_MATHFN (LGAMMA)
	CASE_MATHFN (POW10)
	CASE_MATHFN (SINH)
	CASE_MATHFN (TGAMMA)
	CASE_MATHFN (Y0)
	CASE_MATHFN (Y1)
	  /* The above functions may set errno differently with float
	     input or output, so this transformation is not safe with
	     -fmath-errno.  */
	  if (flag_errno_math)
	    break;
	  gcc_fallthrough ();
	CASE_MATHFN (ACOS)
	CASE_MATHFN (ACOSH)
	CASE_MATHFN (ASIN)
	CASE_MATHFN (ASINH)
	CASE_MATHFN (ATAN)
	CASE_MATHFN (ATANH)
	CASE_MATHFN (CBRT)
	CASE_MATHFN (COS)
	CASE_MATHFN (ERF)
	CASE_MATHFN (ERFC)
	CASE_MATHFN (LOG)
	CASE_MATHFN (LOG10)
	CASE_MATHFN (LOG2)
	CASE_MATHFN (LOG1P)
	CASE_MATHFN (SIN)
	CASE_MATHFN (TAN)
	CASE_MATHFN (TANH)
	  /* It is not safe to do this conversion for the above functions.  */
	  if (!flag_unsafe_math_optimizations)
	    break;
	  gcc_fallthrough ();
	CASE_MATHFN (SQRT)
	CASE_MATHFN (FABS)
	CASE_MATHFN (LOGB)
#undef CASE_MATHFN
	  if (call_expr_nargs (expr) != 1
	      || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (expr, 0))))
	    break;
	  {
	    tree arg0 = strip_float_extensions (CALL_EXPR_ARG (expr, 0));
	    tree newtype = type;

	    /* We have (outertype)sqrt((innertype)x).  Choose the wider mode
	       of the two as the safe type for the operation.  */
	    if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (type))
	      newtype = TREE_TYPE (arg0);

	    /* We consider converting

	       (T1) sqrtT2 ((T2) exprT3)

	       to

	       (T1) sqrtT4 ((T4) exprT3)

	       , where T1 is TYPE, T2 is ITYPE, T3 is TREE_TYPE (ARG0),
	       and T4 is NEWTYPE.  All those types are floating-point types.
	       T4 (NEWTYPE) should be narrower than T2 (ITYPE).  This
	       conversion is safe only if P1 >= P2*2+2, where P1 and P2 are
	       precisions of T2 and T4.  See the following URL for a
	       reference:
	       http://stackoverflow.com/questions/9235456/determining-
	       floating-point-square-root
	       */
	    if ((fcode == BUILT_IN_SQRT || fcode == BUILT_IN_SQRTL)
		&& !flag_unsafe_math_optimizations)
	      {
		/* The following conversion is unsafe even if the precision
		   condition below is satisfied:

		   (float) sqrtl ((long double) double_val) -> (float) sqrt (double_val)
		  */
		if (TYPE_MODE (type) != TYPE_MODE (newtype))
		  break;

		int p1 = REAL_MODE_FORMAT (TYPE_MODE (itype))->p;
		int p2 = REAL_MODE_FORMAT (TYPE_MODE (newtype))->p;
		if (p1 < p2 * 2 + 2)
		  break;
	      }
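
	    /* Worked example: for (float) sqrt ((double) x) the IEEE
	       precisions are P1 = 53 for double (ITYPE) and P2 = 24 for
	       float (NEWTYPE); 53 >= 24*2+2 holds, so narrowing sqrt to
	       sqrtf is accepted.  For (double) sqrtl ((long double) x) with
	       a 64-bit-mantissa long double, 64 < 53*2+2, so the check above
	       rejects the narrowing.  */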

	    /* Be careful about integer to fp conversions.
	       These may overflow still.  */
	    if (FLOAT_TYPE_P (TREE_TYPE (arg0))
		&& TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
		&& (TYPE_MODE (newtype) == TYPE_MODE (double_type_node)
		    || TYPE_MODE (newtype) == TYPE_MODE (float_type_node)))
	      {
		tree fn = mathfn_built_in (newtype, fcode);
		if (fn)
		  {
		    tree arg = convert_to_real_1 (newtype, arg0, fold_p);
		    expr = build_call_expr (fn, 1, arg);
		    if (newtype == type)
		      return expr;
		  }
	      }
	  }
	default:
	  break;
	}
    }

  /* Propagate the cast into the operation.  */
  if (itype != type && FLOAT_TYPE_P (type))
    switch (TREE_CODE (expr))
      {
      /* Convert (float)-x into -(float)x.  This is safe for
	 round-to-nearest rounding mode when the inner type is float.  */
      case ABS_EXPR:
      case NEGATE_EXPR:
	if (!flag_rounding_math
	    && FLOAT_TYPE_P (itype)
	    && element_precision (type) < element_precision (itype))
	  {
	    tree arg = convert_to_real_1 (type, TREE_OPERAND (expr, 0),
					  fold_p);
	    return build1 (TREE_CODE (expr), type, arg);
	  }
	break;
      default:
	break;
      }
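
  /* For illustration: with double x, (float) -x is rewritten above as
     -(float) x.  The !flag_rounding_math guard is what makes this valid:
     negation commutes with the narrowing rounding only when the rounding
     mode is symmetric about zero (round-to-nearest); with directional
     rounding the two forms can differ in the last bit.  */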

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case REAL_TYPE:
      /* Ignore the conversion if we don't need to store intermediate
	 results and neither type is a decimal float.  */
      return build1_loc (loc,
			 (flag_float_store
			  || DECIMAL_FLOAT_TYPE_P (type)
			  || DECIMAL_FLOAT_TYPE_P (itype))
			 ? CONVERT_EXPR : NOP_EXPR, type, expr);

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case BITINT_TYPE:
      return build1 (FLOAT_EXPR, type, expr);

    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      return convert (type,
		      maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
					     TREE_TYPE (TREE_TYPE (expr)),
					     expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      error ("pointer value used where a floating-point was expected");
      return error_mark_node;

    case VECTOR_TYPE:
      error ("vector value used where a floating-point was expected");
      return error_mark_node;

    default:
      error ("aggregate value used where a floating-point was expected");
      return error_mark_node;
    }
}

/* A wrapper around convert_to_real_1 that always folds the
   expression.  */

tree
convert_to_real (tree type, tree expr)
{
  return convert_to_real_1 (type, expr, true);
}

/* A wrapper around convert_to_real_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */

tree
convert_to_real_maybe_fold (tree type, tree expr, bool dofold)
{
  tree result
    = convert_to_real_1 (type, expr,
			 dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
  return preserve_any_location_wrapper (result, expr);
}

/* Try to narrow EX_FORM ARG0 ARG1 in narrowed arg types producing a
   result in TYPE.  */

static tree
do_narrow (location_t loc,
	   enum tree_code ex_form, tree type, tree arg0, tree arg1,
	   tree expr, unsigned inprec, unsigned outprec, bool dofold)
{
  /* Do the arithmetic in type TYPEX,
     then convert result to TYPE.  */
  tree typex = type;

  /* Can't do arithmetic in enumeral types
     so use an integer type that will hold the values.  */
  if (TREE_CODE (typex) == ENUMERAL_TYPE)
    typex = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
					    TYPE_UNSIGNED (typex));

  /* The type demotion below might cause doing unsigned arithmetic
     instead of signed, and thus hide overflow bugs.  */
  if ((ex_form == PLUS_EXPR || ex_form == MINUS_EXPR)
      && !TYPE_UNSIGNED (typex)
      && sanitize_flags_p (SANITIZE_SI_OVERFLOW))
    return NULL_TREE;

  /* Similarly for multiplication, but in that case it can be
     problematic even if typex is an unsigned type - 0xffff * 0xffff
     overflows in int.  */
  if (ex_form == MULT_EXPR
      && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (expr))
      && sanitize_flags_p (SANITIZE_SI_OVERFLOW))
    return NULL_TREE;

  /* But now perhaps TYPEX is as wide as INPREC.
     In that case, do nothing special here.
     (Otherwise we would recurse infinitely in convert.)  */
  if (TYPE_PRECISION (typex) != inprec)
    {
      /* Don't do unsigned arithmetic where signed was wanted,
	 or vice versa.
	 Exception: if both of the original operands were
	 unsigned then we can safely do the work as unsigned.
	 Exception: shift operations take their type solely
	 from the first argument.
	 Exception: the LSHIFT_EXPR case above requires that
	 we perform this operation unsigned lest we produce
	 signed-overflow undefinedness.
	 And we may need to do it as unsigned
	 if we truncate to the original size.  */
      if (TYPE_UNSIGNED (TREE_TYPE (expr))
	  || (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      && (TYPE_UNSIGNED (TREE_TYPE (arg1))
		  || ex_form == LSHIFT_EXPR
		  || ex_form == RSHIFT_EXPR
		  || ex_form == LROTATE_EXPR
		  || ex_form == RROTATE_EXPR))
	  || ex_form == LSHIFT_EXPR
	  /* If we have !flag_wrapv, and either ARG0 or
	     ARG1 is of a signed type, we have to do
	     PLUS_EXPR, MINUS_EXPR or MULT_EXPR in an unsigned
	     type in case the operation in outprec precision
	     could overflow.  Otherwise, we would introduce
	     signed-overflow undefinedness.  */
	  || ((!(INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
	       || !(INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		    && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1))))
	      && ((TYPE_PRECISION (TREE_TYPE (arg0)) * 2u
		   > outprec)
		  || (TYPE_PRECISION (TREE_TYPE (arg1)) * 2u
		      > outprec))
	      && (ex_form == PLUS_EXPR
		  || ex_form == MINUS_EXPR
		  || ex_form == MULT_EXPR)))
	{
	  if (!TYPE_UNSIGNED (typex))
	    typex = unsigned_type_for (typex);
	}
      else
	{
	  if (TYPE_UNSIGNED (typex))
	    typex = signed_type_for (typex);
	}

      /* We should do away with all this once we have a proper
	 type promotion/demotion pass, see PR45397.  */
      expr = maybe_fold_build2_loc (dofold, loc, ex_form, typex,
				    convert (typex, arg0),
				    convert (typex, arg1));
      return convert (type, expr);
    }

  return NULL_TREE;
}
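
/* For illustration: narrowing (short) ((int) a + (int) b), with a and b of
   type short, reaches do_narrow via the PLUS_EXPR case of
   convert_to_integer_1; the addition is redone in the unsigned variant of
   short (so that demoting the arithmetic cannot introduce new signed
   overflow) and the sum is then converted to the signed result type.  */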

/* Convert EXPR to some integer (or enum) type TYPE.

   EXPR must be pointer, integer, discrete (enum, char, or bool), float,
   fixed-point or vector; in other cases error is called.

   If DOFOLD is TRUE, we try to simplify newly-created patterns by folding.

   The result of this is always supposed to be a newly created tree node
   not in use in any existing structure.  */

static tree
convert_to_integer_1 (tree type, tree expr, bool dofold)
{
  enum tree_code ex_form = TREE_CODE (expr);
  tree intype = TREE_TYPE (expr);
  unsigned int inprec = element_precision (intype);
  unsigned int outprec = element_precision (type);
  location_t loc = EXPR_LOCATION (expr);

  /* An INTEGER_TYPE cannot be incomplete, but an ENUMERAL_TYPE can
     be.  Consider `enum E = { a, b = (enum E) 3 };'.  */
  if (!COMPLETE_TYPE_P (type))
    {
      error ("conversion to incomplete type");
      return error_mark_node;
    }

  if (ex_form == COMPOUND_EXPR)
    {
      tree t = convert_to_integer_1 (type, TREE_OPERAND (expr, 1), dofold);
      if (t == TREE_OPERAND (expr, 1))
	return expr;
      return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
			 TREE_OPERAND (expr, 0), t);
    }

  /* Convert e.g. (long)round(d) -> lround(d).  */
  /* If we're converting to char, we may encounter differing behavior
     between converting from double->char vs double->long->char.
     We're in "undefined" territory but we prefer to be conservative,
     so only proceed in "unsafe" math mode.  */
  if (optimize
      && (flag_unsafe_math_optimizations
	  || (long_integer_type_node
	      && outprec >= TYPE_PRECISION (long_integer_type_node))))
    {
      tree s_expr = strip_float_extensions (expr);
      tree s_intype = TREE_TYPE (s_expr);
      const enum built_in_function fcode = builtin_mathfn_code (s_expr);
      tree fn = 0;

      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
	  /* Only convert in ISO C99 mode.  */
	  if (!targetm.libc_has_function (function_c99_misc, intype))
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_ICEIL);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LCEIL);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
	  /* Only convert in ISO C99 mode.  */
	  if (!targetm.libc_has_function (function_c99_misc, intype))
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_IFLOOR);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LFLOOR);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
	  /* Only convert in ISO C99 mode and with -fno-math-errno.  */
	  if (!targetm.libc_has_function (function_c99_misc, intype)
	      || flag_errno_math)
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_IROUND);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LROUND);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
	  /* Only convert nearbyint* if we can ignore math exceptions.  */
	  if (flag_trapping_math)
	    break;
	  gcc_fallthrough ();
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
	  /* Only convert in ISO C99 mode and with -fno-math-errno.  */
	  if (!targetm.libc_has_function (function_c99_misc, intype)
	      || flag_errno_math)
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_IRINT);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LRINT);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLRINT);
	  break;

	CASE_FLT_FN (BUILT_IN_TRUNC):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
	  if (call_expr_nargs (s_expr) != 1
	      || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (s_expr, 0))))
	    break;
	  return convert_to_integer_1 (type, CALL_EXPR_ARG (s_expr, 0),
				       dofold);

	default:
	  break;
	}

      if (fn
	  && call_expr_nargs (s_expr) == 1
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (s_expr, 0))))
	{
	  tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
	  return convert_to_integer_1 (type, newexpr, dofold);
	}
    }
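
  /* For illustration: with the checks above satisfied, (long) round (d) for
     a double d is rewritten as lround (d); a result type narrower than int
     (or int itself when signed) selects iround, and a long long result
     selects llround.  The same precision-based dispatch is used for the
     ceil, floor, rint and nearbyint cases.  */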

  /* Convert (int)logb(d) -> ilogb(d).  */
  if (optimize
      && flag_unsafe_math_optimizations
      && !flag_trapping_math && !flag_errno_math && flag_finite_math_only
      && integer_type_node
      && (outprec > TYPE_PRECISION (integer_type_node)
	  || (outprec == TYPE_PRECISION (integer_type_node)
	      && !TYPE_UNSIGNED (type))))
    {
      tree s_expr = strip_float_extensions (expr);
      tree s_intype = TREE_TYPE (s_expr);
      const enum built_in_function fcode = builtin_mathfn_code (s_expr);
      tree fn = 0;

      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LOGB):
	  fn = mathfn_built_in (s_intype, BUILT_IN_ILOGB);
	  break;

	default:
	  break;
	}

      if (fn
	  && call_expr_nargs (s_expr) == 1
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (s_expr, 0))))
	{
	  tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
	  return convert_to_integer_1 (type, newexpr, dofold);
	}
    }

  switch (TREE_CODE (intype))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      if (integer_zerop (expr)
	  && !TREE_OVERFLOW (tree_strip_any_location_wrapper (expr)))
	return build_int_cst (type, 0);

      /* Convert to an unsigned integer of the correct width first, and from
	 there widen/truncate to the required type.  Some targets support the
	 coexistence of multiple valid pointer sizes, so fetch the one we need
	 from the type.  */
      if (!dofold)
	return build1 (CONVERT_EXPR, type, expr);
      expr = fold_build1 (CONVERT_EXPR,
			  lang_hooks.types.type_for_size
			    (TYPE_PRECISION (intype), 0),
			  expr);
      return fold_convert (type, expr);

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case OFFSET_TYPE:
    case BITINT_TYPE:
      /* If this is a logical operation, which just returns 0 or 1, we can
	 change the type of the expression.  */

      if (TREE_CODE_CLASS (ex_form) == tcc_comparison)
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}

      /* If we are widening the type, put in an explicit conversion.
	 Similarly if we are not changing the width.  After this, we know
	 we are truncating EXPR.  */

      else if (outprec >= inprec)
	{
	  enum tree_code code;

	  /* If the precision of the EXPR's type is K bits and the
	     destination mode has more bits, and the sign is changing,
	     it is not safe to use a NOP_EXPR.  For example, suppose
	     that EXPR's type is a 3-bit unsigned integer type, the
	     TYPE is a 3-bit signed integer type, and the machine mode
	     for the types is 8-bit QImode.  In that case, the
	     conversion necessitates an explicit sign-extension.  In
	     the signed-to-unsigned case the high-order bits have to
	     be cleared.  */
	  if (TYPE_UNSIGNED (type) != TYPE_UNSIGNED (TREE_TYPE (expr))
	      && !type_has_mode_precision_p (TREE_TYPE (expr)))
	    code = CONVERT_EXPR;
	  else
	    code = NOP_EXPR;

	  return maybe_fold_build1_loc (dofold, loc, code, type, expr);
	}

      /* If TYPE is an enumeral type or a type with a precision less
	 than the number of bits in its mode, do the conversion to the
	 type corresponding to its mode, then do a nop conversion
	 to TYPE.  */
      else if (TREE_CODE (type) == ENUMERAL_TYPE
	       || (TREE_CODE (type) != BITINT_TYPE
		   && maybe_ne (outprec,
				GET_MODE_PRECISION (TYPE_MODE (type)))))
	{
	  expr
	    = convert_to_integer_1 (lang_hooks.types.type_for_mode
				      (TYPE_MODE (type), TYPE_UNSIGNED (type)),
				    expr, dofold);
	  return maybe_fold_build1_loc (dofold, loc, NOP_EXPR, type, expr);
	}

      /* Here detect when we can distribute the truncation down past some
	 arithmetic.  For example, if adding two longs and converting to an
	 int, we can equally well convert both to ints and then add.
	 For the operations handled here, such truncation distribution
	 is always safe.
	 It is desirable in these cases:
	 1) when truncating down to full-word from a larger size
	 2) when truncating takes no work.
	 3) when at least one operand of the arithmetic has been extended
	 (as by C's default conversions).  In this case we need two conversions
	 if we do the arithmetic as already requested, so we might as well
	 truncate both and then combine.  Perhaps that way we need only one.

	 Note that in general we cannot do the arithmetic in a type
	 shorter than the desired result of conversion, even if the operands
	 are both extended from a shorter type, because they might overflow
	 if combined in that type.  The exceptions to this--the times when
	 two narrow values can be combined in their narrow type even to
	 make a wider result--are handled by "shorten" in build_binary_op.  */
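
      /* For illustration: with 32-bit int and 64-bit long, converting
	 (int) ((long) a + (long) b) for int operands a and b falls into the
	 PLUS_EXPR case below; both operands are unwidened back to int and
	 do_narrow redoes the addition at int width (in the unsigned variant,
	 to avoid introducing signed overflow), saving the two widening
	 conversions.  */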

      if (dofold)
	switch (ex_form)
	  {
	  case RSHIFT_EXPR:
	    /* We can pass truncation down through right shifting
	       when the shift count is a nonpositive constant.  */
	    if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
		&& tree_int_cst_sgn (TREE_OPERAND (expr, 1)) <= 0)
	      goto trunc1;
	    break;

	  case LSHIFT_EXPR:
	    /* We can pass truncation down through left shifting
	       when the shift count is a nonnegative constant and
	       the target type is unsigned.  */
	    if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
		&& tree_int_cst_sgn (TREE_OPERAND (expr, 1)) >= 0
		&& TYPE_UNSIGNED (type)
		&& TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
	      {
		/* If the shift count is less than the width of the truncated
		   type, really shift.  */
		if (tree_int_cst_lt (TREE_OPERAND (expr, 1), TYPE_SIZE (type)))
		  /* In this case, shifting is like multiplication.  */
		  goto trunc1;
		else
		  {
		    /* If it is >= that width, the result is zero.
		       Handling this with trunc1 would give the wrong result:
		       (int) ((long long) a << 32) is well defined (as 0)
		       but (int) a << 32 is undefined and would get a
		       warning.  */

		    tree t = build_int_cst (type, 0);

		    /* If the original expression had side-effects, we must
		       preserve it.  */
		    if (TREE_SIDE_EFFECTS (expr))
		      return build2 (COMPOUND_EXPR, type, expr, t);
		    else
		      return t;
		  }
	      }
	    break;

	  case TRUNC_DIV_EXPR:
	    {
	      tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), NULL_TREE);
	      tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), NULL_TREE);

	      /* Don't distribute unless the output precision is at least as
		 big as the actual inputs and it has the same signedness.  */
	      if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
		  && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
		  /* If signedness of arg0 and arg1 don't match,
		     we can't necessarily find a type to compare them in.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
		      == TYPE_UNSIGNED (TREE_TYPE (arg1)))
		  /* Do not change the sign of the division.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (expr))
		      == TYPE_UNSIGNED (TREE_TYPE (arg0)))
		  /* Either require unsigned division or a division by
		     a constant that is not -1.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
		      || (TREE_CODE (arg1) == INTEGER_CST
			  && !integer_all_onesp (arg1))))
		{
		  tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
					expr, inprec, outprec, dofold);
		  if (tem)
		    return tem;
		}
	      break;
	    }

	  case MAX_EXPR:
	  case MIN_EXPR:
	  case MULT_EXPR:
	    {
	      tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
	      tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

	      /* Don't distribute unless the output precision is at least as
		 big as the actual inputs.  Otherwise, the comparison of the
		 truncated values will be wrong.  */
	      if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
		  && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
		  /* If signedness of arg0 and arg1 don't match,
		     we can't necessarily find a type to compare them in.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
		      == TYPE_UNSIGNED (TREE_TYPE (arg1))))
		goto trunc1;
	      break;
	    }

	  case PLUS_EXPR:
	  case MINUS_EXPR:
	  case BIT_AND_EXPR:
	  case BIT_IOR_EXPR:
	  case BIT_XOR_EXPR:
	  trunc1:
	    {
	      tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
	      tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

	      /* Do not try to narrow operands of pointer subtraction;
		 that will interfere with other folding.  */
	      if (ex_form == MINUS_EXPR
		  && CONVERT_EXPR_P (arg0)
		  && CONVERT_EXPR_P (arg1)
		  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
		  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
		break;

	      tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
				    expr, inprec, outprec, dofold);
	      if (tem)
		return tem;
	    }
	    break;

	  case NEGATE_EXPR:
	    /* Using unsigned arithmetic for signed types may hide overflow
	       bugs.  */
	    if (!TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (expr, 0)))
		&& sanitize_flags_p (SANITIZE_SI_OVERFLOW))
	      break;
	    /* Fall through.  */
	  case BIT_NOT_EXPR:
	    /* This is not correct for ABS_EXPR,
	       since we must test the sign before truncation.  */
	    {
	      /* Do the arithmetic in type TYPEX,
		 then convert result to TYPE.  */
	      tree typex = type;

	      /* Can't do arithmetic in enumeral types
		 so use an integer type that will hold the values.  */
	      if (TREE_CODE (typex) == ENUMERAL_TYPE)
		typex
		  = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
						    TYPE_UNSIGNED (typex));

	      if (!TYPE_UNSIGNED (typex))
		typex = unsigned_type_for (typex);
	      return convert (type,
			      fold_build1 (ex_form, typex,
					   convert (typex,
						    TREE_OPERAND (expr, 0))));
	    }
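
	    /* For illustration: converting -x or ~x (with int x) to signed
	       char is handled above by doing the operation on
	       (unsigned char) x and then converting to signed char;
	       truncation commutes with negation and bitwise-not modulo 2^8,
	       and using the unsigned type avoids introducing signed
	       overflow.  ABS_EXPR is excluded because the sign must be
	       tested before truncation.  */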

	  CASE_CONVERT:
	    {
	      tree argtype = TREE_TYPE (TREE_OPERAND (expr, 0));
	      /* Don't introduce a "can't convert between vector values
		 of different size" error.  */
	      if (TREE_CODE (argtype) == VECTOR_TYPE
		  && maybe_ne (GET_MODE_SIZE (TYPE_MODE (argtype)),
			       GET_MODE_SIZE (TYPE_MODE (type))))
		break;
	    }
	    /* If truncating after truncating, might as well do all at once.
	       If truncating after extending, we may get rid of wasted work.  */
	    return convert (type, get_unwidened (TREE_OPERAND (expr, 0), type));

	  case COND_EXPR:
	    /* It is sometimes worthwhile to push the narrowing down through
	       the conditional; it never loses.  A COND_EXPR may have a throw
	       as one operand, which then has void type.  Just leave void
	       operands as they are.  */
	    return
	      fold_build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
			   VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1)))
			   ? TREE_OPERAND (expr, 1)
			   : convert (type, TREE_OPERAND (expr, 1)),
			   VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 2)))
			   ? TREE_OPERAND (expr, 2)
			   : convert (type, TREE_OPERAND (expr, 2)));

	  default:
	    break;
	  }

      /* When parsing long initializers, we might end up with a lot of casts.
	 Shortcut this.  */
      if (TREE_CODE (tree_strip_any_location_wrapper (expr)) == INTEGER_CST)
	return fold_convert (type, expr);
      return build1 (CONVERT_EXPR, type, expr);

    case REAL_TYPE:
      if (sanitize_flags_p (SANITIZE_FLOAT_CAST)
	  && current_function_decl != NULL_TREE)
	{
	  expr = save_expr (expr);
	  tree check = ubsan_instrument_float_cast (loc, type, expr);
	  expr = build1 (FIX_TRUNC_EXPR, type, expr);
	  if (check == NULL_TREE)
	    return expr;
	  return maybe_fold_build2_loc (dofold, loc, COMPOUND_EXPR,
					TREE_TYPE (expr), check, expr);
	}
      else
	return build1 (FIX_TRUNC_EXPR, type, expr);

    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      expr = maybe_fold_build1_loc (dofold, loc, REALPART_EXPR,
				    TREE_TYPE (TREE_TYPE (expr)), expr);
      return convert (type, expr);

    case VECTOR_TYPE:
      if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
	{
	  error ("cannot convert a vector of type %qT"
		 " to type %qT which has different size",
		 TREE_TYPE (expr), type);
	  return error_mark_node;
	}
      return build1 (VIEW_CONVERT_EXPR, type, expr);

    default:
      error ("aggregate value used where an integer was expected");
      return error_mark_node;
    }
}

/* Convert EXPR to some integer (or enum) type TYPE.

   EXPR must be pointer, integer, discrete (enum, char, or bool), float,
   fixed-point or vector; in other cases error is called.

   The result of this is always supposed to be a newly created tree node
   not in use in any existing structure.  */

tree
convert_to_integer (tree type, tree expr)
{
  return convert_to_integer_1 (type, expr, true);
}

/* A wrapper around convert_to_integer_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */

tree
convert_to_integer_maybe_fold (tree type, tree expr, bool dofold)
{
  tree result
    = convert_to_integer_1 (type, expr,
			    dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
  return preserve_any_location_wrapper (result, expr);
}

/* Convert EXPR to the complex type TYPE in the usual ways.  If FOLD_P is
   true, try to fold the expression.  */

static tree
convert_to_complex_1 (tree type, tree expr, bool fold_p)
{
  location_t loc = EXPR_LOCATION (expr);
  tree subtype = TREE_TYPE (type);

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case BITINT_TYPE:
      {
	tree real = convert (subtype, expr);
	tree imag = convert (subtype, integer_zero_node);
	if (error_operand_p (real) || error_operand_p (imag))
	  return error_mark_node;
	return build2 (COMPLEX_EXPR, type, real, imag);
      }

    case COMPLEX_TYPE:
      {
	tree elt_type = TREE_TYPE (TREE_TYPE (expr));

	if (TYPE_MAIN_VARIANT (elt_type) == TYPE_MAIN_VARIANT (subtype))
	  return expr;
	else if (TREE_CODE (expr) == COMPOUND_EXPR)
	  {
	    tree t = convert_to_complex_1 (type, TREE_OPERAND (expr, 1),
					   fold_p);
	    if (t == TREE_OPERAND (expr, 1))
	      return expr;
	    return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR,
			       TREE_TYPE (t), TREE_OPERAND (expr, 0), t);
	  }
	else if (TREE_CODE (expr) == COMPLEX_EXPR)
	  return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
					convert (subtype,
						 TREE_OPERAND (expr, 0)),
					convert (subtype,
						 TREE_OPERAND (expr, 1)));
	else
	  {
	    expr = save_expr (expr);
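	    /* The SAVE_EXPR ensures EXPR is evaluated only once even though
	       it is referenced twice below, once for the real and once for
	       the imaginary part.  */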
	    tree realp = maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
						TREE_TYPE (TREE_TYPE (expr)),
						expr);
	    tree imagp = maybe_fold_build1_loc (fold_p, loc, IMAGPART_EXPR,
						TREE_TYPE (TREE_TYPE (expr)),
						expr);
	    return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
					  convert (subtype, realp),
					  convert (subtype, imagp));
	  }
      }

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      error ("pointer value used where a complex was expected");
      return error_mark_node;

    default:
      error ("aggregate value used where a complex was expected");
      return error_mark_node;
    }
}

/* A wrapper around convert_to_complex_1 that always folds the
   expression.  */

tree
convert_to_complex (tree type, tree expr)
{
  return convert_to_complex_1 (type, expr, true);
}

/* A wrapper around convert_to_complex_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */

tree
convert_to_complex_maybe_fold (tree type, tree expr, bool dofold)
{
  tree result
    = convert_to_complex_1 (type, expr,
			    dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
  return preserve_any_location_wrapper (result, expr);
}

/* Convert EXPR to the vector type TYPE in the usual ways.  */

tree
convert_to_vector (tree type, tree expr)
{
  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case INTEGER_TYPE:
    case VECTOR_TYPE:
      if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
	{
	  error ("cannot convert a value of type %qT"
		 " to vector type %qT which has different size",
		 TREE_TYPE (expr), type);
	  return error_mark_node;
	}
      return build1 (VIEW_CONVERT_EXPR, type, expr);

    default:
      error ("cannot convert value to a vector");
      return error_mark_node;
    }
}

/* Convert EXPR to some fixed-point type TYPE.

   EXPR must be fixed-point, float, integer, or enumeral;
   in other cases error is called.  */

tree
convert_to_fixed (tree type, tree expr)
{
  if (integer_zerop (expr))
    {
      tree fixed_zero_node = build_fixed (type, FCONST0 (TYPE_MODE (type)));
      return fixed_zero_node;
    }
  else if (integer_onep (expr) && ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)))
    {
      tree fixed_one_node = build_fixed (type, FCONST1 (TYPE_MODE (type)));
      return fixed_one_node;
    }

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case FIXED_POINT_TYPE:
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      return convert (type,
		      fold_build1 (REALPART_EXPR,
				   TREE_TYPE (TREE_TYPE (expr)), expr));

    default:
      error ("aggregate value used where a fixed-point was expected");
      return error_mark_node;
    }
}

#if CHECKING_P

namespace selftest {

/* Selftests for conversions.  */

static void
test_convert_to_integer_maybe_fold (tree orig_type, tree new_type)
{
  /* Calling convert_to_integer_maybe_fold on an INTEGER_CST.  */

  tree orig_cst = build_int_cst (orig_type, 42);

  /* Verify that convert_to_integer_maybe_fold on a constant returns a new
     constant of the new type, unless the types are the same, in which
     case verify it's a no-op.  */
  {
    tree result = convert_to_integer_maybe_fold (new_type,
						 orig_cst, false);
    if (orig_type != new_type)
      {
	ASSERT_EQ (TREE_TYPE (result), new_type);
	ASSERT_EQ (TREE_CODE (result), INTEGER_CST);
      }
    else
      ASSERT_EQ (result, orig_cst);
  }

  /* Calling convert_to_integer_maybe_fold on a location wrapper around
     an INTEGER_CST.

     Verify that convert_to_integer_maybe_fold on a location wrapper
     around a constant returns a new location wrapper around an equivalent
     constant, both of the new type, unless the types are the same,
     in which case the original wrapper should be returned.  */
  {
    const location_t loc = BUILTINS_LOCATION;
    tree wrapped_orig_cst = maybe_wrap_with_location (orig_cst, loc);
    tree result
      = convert_to_integer_maybe_fold (new_type, wrapped_orig_cst, false);
    ASSERT_EQ (TREE_TYPE (result), new_type);
    ASSERT_EQ (EXPR_LOCATION (result), loc);
    ASSERT_TRUE (location_wrapper_p (result));
    ASSERT_EQ (TREE_TYPE (TREE_OPERAND (result, 0)), new_type);
    ASSERT_EQ (TREE_CODE (TREE_OPERAND (result, 0)), INTEGER_CST);

    if (orig_type == new_type)
      ASSERT_EQ (result, wrapped_orig_cst);
  }
}

/* Verify that convert_to_integer_maybe_fold preserves locations.  */

static void
test_convert_to_integer_maybe_fold ()
{
  /* char -> long.  */
  test_convert_to_integer_maybe_fold (char_type_node, long_integer_type_node);

  /* char -> char.  */
  test_convert_to_integer_maybe_fold (char_type_node, char_type_node);

  /* long -> char.  */
  test_convert_to_integer_maybe_fold (long_integer_type_node,
				      char_type_node);

  /* long -> long.  */
  test_convert_to_integer_maybe_fold (long_integer_type_node,
				      long_integer_type_node);
}

/* Run all of the selftests within this file.  */

void
convert_cc_tests ()
{
  test_convert_to_integer_maybe_fold ();
}

} // namespace selftest

#endif /* CHECKING_P */