/* Utility routines for data type conversion for GCC.
   Copyright (C) 1987-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* These routines are somewhat language-independent utility functions
   intended to be called by the language-specific convert () functions.  */
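/* Each front end supplies its own convert () function; the helpers below
   (convert_to_pointer, convert_to_real, convert_to_integer,
   convert_to_complex, convert_to_vector and convert_to_fixed, plus the
   *_maybe_fold variants) do the target-type-specific work and call back
   into convert () for sub-expressions.  */
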
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "convert.h"
#include "langhooks.h"
#include "builtins.h"
#include "ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "selftest.h"

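/* Build a tree of code CODE with the given operands, folding the result
   only when FOLD_P is true; otherwise the node is built unsimplified.  */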
#define maybe_fold_build1_loc(FOLD_P, LOC, CODE, TYPE, EXPR) \
  ((FOLD_P) ? fold_build1_loc (LOC, CODE, TYPE, EXPR) \
	    : build1_loc (LOC, CODE, TYPE, EXPR))
#define maybe_fold_build2_loc(FOLD_P, LOC, CODE, TYPE, EXPR1, EXPR2) \
  ((FOLD_P) ? fold_build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2) \
	    : build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2))

/* Convert EXPR to some pointer or reference type TYPE.
   EXPR must be pointer, reference, integer, enumeral, or literal zero;
   in other cases error is called.  If FOLD_P is true, try to fold the
   expression.  */

static tree
convert_to_pointer_1 (tree type, tree expr, bool fold_p)
{
  location_t loc = EXPR_LOCATION (expr);
  if (TREE_TYPE (expr) == type)
    return expr;

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	/* If the pointers point to different address spaces, the conversion
	   needs to be done via an ADDR_SPACE_CONVERT_EXPR instead of a
	   NOP_EXPR.  */
	addr_space_t to_as = TYPE_ADDR_SPACE (TREE_TYPE (type));
	addr_space_t from_as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));

	if (to_as == from_as)
	  return maybe_fold_build1_loc (fold_p, loc, NOP_EXPR, type, expr);
	else
	  return maybe_fold_build1_loc (fold_p, loc, ADDR_SPACE_CONVERT_EXPR,
					type, expr);
      }

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      {
	/* If the input precision differs from the target pointer type
	   precision, first convert the input expression to an integer type of
	   the target precision.  Some targets, e.g. VMS, need several pointer
	   sizes to coexist so the latter isn't necessarily POINTER_SIZE.  */
	unsigned int pprec = TYPE_PRECISION (type);
	unsigned int eprec = TYPE_PRECISION (TREE_TYPE (expr));

	if (eprec != pprec)
	  expr
	    = maybe_fold_build1_loc (fold_p, loc, NOP_EXPR,
				     lang_hooks.types.type_for_size (pprec, 0),
				     expr);
      }
      return maybe_fold_build1_loc (fold_p, loc, CONVERT_EXPR, type, expr);

    default:
      error ("cannot convert to a pointer type");
      return convert_to_pointer_1 (type, integer_zero_node, fold_p);
    }
}

/* Subroutine of the various convert_to_*_maybe_fold routines.

   If a location wrapper has been folded to a constant (presumably of
   a different type), re-wrap the new constant with a location wrapper.  */

tree
preserve_any_location_wrapper (tree result, tree orig_expr)
{
  if (CONSTANT_CLASS_P (result) && location_wrapper_p (orig_expr))
    {
      if (result == TREE_OPERAND (orig_expr, 0))
	return orig_expr;
      else
	return maybe_wrap_with_location (result, EXPR_LOCATION (orig_expr));
    }

  return result;
}

/* A wrapper around convert_to_pointer_1 that always folds the
   expression.  */

tree
convert_to_pointer (tree type, tree expr)
{
  return convert_to_pointer_1 (type, expr, true);
}

/* A wrapper around convert_to_pointer_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */

tree
convert_to_pointer_maybe_fold (tree type, tree expr, bool dofold)
{
  tree result
    = convert_to_pointer_1 (type, expr,
			    dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
  return preserve_any_location_wrapper (result, expr);
}

/* Convert EXPR to some floating-point type TYPE.

   EXPR must be float, fixed-point, integer, or enumeral;
   in other cases error is called.  If FOLD_P is true, try to fold
   the expression.  */

static tree
convert_to_real_1 (tree type, tree expr, bool fold_p)
{
  enum built_in_function fcode = builtin_mathfn_code (expr);
  tree itype = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == COMPOUND_EXPR)
    {
      tree t = convert_to_real_1 (type, TREE_OPERAND (expr, 1), fold_p);
      if (t == TREE_OPERAND (expr, 1))
	return expr;
      return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
			 TREE_OPERAND (expr, 0), t);
    }

  /* Disable until we figure out how to decide whether the functions are
     present in runtime.  */
  /* Convert (float)sqrt((double)x) where x is float into sqrtf(x) */
  if (optimize
      && (TYPE_MODE (type) == TYPE_MODE (double_type_node)
	  || TYPE_MODE (type) == TYPE_MODE (float_type_node)))
    {
      switch (fcode)
	{
#define CASE_MATHFN(FN) case BUILT_IN_##FN: case BUILT_IN_##FN##L:
	  CASE_MATHFN (COSH)
	  CASE_MATHFN (EXP)
	  CASE_MATHFN (EXP10)
	  CASE_MATHFN (EXP2)
	  CASE_MATHFN (EXPM1)
	  CASE_MATHFN (GAMMA)
	  CASE_MATHFN (J0)
	  CASE_MATHFN (J1)
	  CASE_MATHFN (LGAMMA)
	  CASE_MATHFN (POW10)
	  CASE_MATHFN (SINH)
	  CASE_MATHFN (TGAMMA)
	  CASE_MATHFN (Y0)
	  CASE_MATHFN (Y1)
	    /* The above functions may set errno differently with float
	       input or output so this transformation is not safe with
	       -fmath-errno.  */
	    if (flag_errno_math)
	      break;
	    gcc_fallthrough ();
	  CASE_MATHFN (ACOS)
	  CASE_MATHFN (ACOSH)
	  CASE_MATHFN (ASIN)
	  CASE_MATHFN (ASINH)
	  CASE_MATHFN (ATAN)
	  CASE_MATHFN (ATANH)
	  CASE_MATHFN (CBRT)
	  CASE_MATHFN (COS)
	  CASE_MATHFN (ERF)
	  CASE_MATHFN (ERFC)
	  CASE_MATHFN (LOG)
	  CASE_MATHFN (LOG10)
	  CASE_MATHFN (LOG2)
	  CASE_MATHFN (LOG1P)
	  CASE_MATHFN (SIN)
	  CASE_MATHFN (TAN)
	  CASE_MATHFN (TANH)
	    /* The above functions are not safe to do this conversion.  */
	    if (!flag_unsafe_math_optimizations)
	      break;
	    gcc_fallthrough ();
	  CASE_MATHFN (SQRT)
	  CASE_MATHFN (FABS)
	  CASE_MATHFN (LOGB)
#undef CASE_MATHFN
	    {
	      tree arg0 = strip_float_extensions (CALL_EXPR_ARG (expr, 0));
	      tree newtype = type;

	      /* We have (outertype)sqrt((innertype)x).  Choose the wider mode
		 of the two as the safe type for the operation.  */
	      if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (type))
		newtype = TREE_TYPE (arg0);

	      /* We consider converting

		     (T1) sqrtT2 ((T2) exprT3)
		 to
		     (T1) sqrtT4 ((T4) exprT3)

		 where T1 is TYPE, T2 is ITYPE, T3 is TREE_TYPE (ARG0),
		 and T4 is NEWTYPE.  All those types are floating-point types.
		 T4 (NEWTYPE) should be narrower than T2 (ITYPE).  This
		 conversion is safe only if P1 >= P2*2+2, where P1 and P2 are
		 the precisions of T2 and T4.  See the following URL for a
		 reference:
		 http://stackoverflow.com/questions/9235456/determining-
		 floating-point-square-root  */
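	      /* For example, (float) sqrt ((double) f) with a float F meets
		 this condition: P1 = 53 for double, P2 = 24 for float, and
		 53 >= 24*2+2, so the double result always rounds to the same
		 float value that a single-precision sqrtf (f) would give.  */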
	      if ((fcode == BUILT_IN_SQRT || fcode == BUILT_IN_SQRTL)
		  && !flag_unsafe_math_optimizations)
		{
		  /* The following conversion is unsafe even if the precision
		     condition below is satisfied:

		     (float) sqrtl ((long double) double_val) -> (float) sqrt (double_val)  */
		  if (TYPE_MODE (type) != TYPE_MODE (newtype))
		    break;

		  int p1 = REAL_MODE_FORMAT (TYPE_MODE (itype))->p;
		  int p2 = REAL_MODE_FORMAT (TYPE_MODE (newtype))->p;
		  if (p1 < p2 * 2 + 2)
		    break;
		}

	      /* Be careful about integer to fp conversions.
		 These may overflow still.  */
	      if (FLOAT_TYPE_P (TREE_TYPE (arg0))
		  && TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
		  && (TYPE_MODE (newtype) == TYPE_MODE (double_type_node)
		      || TYPE_MODE (newtype) == TYPE_MODE (float_type_node)))
		{
		  tree fn = mathfn_built_in (newtype, fcode);
		  if (fn)
		    {
		      tree arg = convert_to_real_1 (newtype, arg0, fold_p);
		      expr = build_call_expr (fn, 1, arg);
		      if (newtype == type)
			return expr;
		    }
		}
	    }
	default:
	  break;
	}
    }

  /* Propagate the cast into the operation.  */
  if (itype != type && FLOAT_TYPE_P (type))
    switch (TREE_CODE (expr))
      {
	/* Convert (float)-x into -(float)x.  This is safe for
	   round-to-nearest rounding mode when the inner type is float.  */
      case ABS_EXPR:
      case NEGATE_EXPR:
	if (!flag_rounding_math
	    && FLOAT_TYPE_P (itype)
	    && TYPE_PRECISION (type) < TYPE_PRECISION (itype))
	  {
	    tree arg = convert_to_real_1 (type, TREE_OPERAND (expr, 0),
					  fold_p);
	    return build1 (TREE_CODE (expr), type, arg);
	  }
	break;
	/* Convert (outertype)((innertype0)a+(innertype1)b)
	   into ((newtype)a+(newtype)b) where newtype
	   is the widest mode from all of these.  */
      case PLUS_EXPR:
      case MINUS_EXPR:
      case MULT_EXPR:
      case RDIV_EXPR:
	{
	  tree arg0 = strip_float_extensions (TREE_OPERAND (expr, 0));
	  tree arg1 = strip_float_extensions (TREE_OPERAND (expr, 1));

	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && FLOAT_TYPE_P (TREE_TYPE (arg1))
	      && DECIMAL_FLOAT_TYPE_P (itype) == DECIMAL_FLOAT_TYPE_P (type))
	    {
	      tree newtype = type;

	      if (TYPE_MODE (TREE_TYPE (arg0)) == SDmode
		  || TYPE_MODE (TREE_TYPE (arg1)) == SDmode
		  || TYPE_MODE (type) == SDmode)
		newtype = dfloat32_type_node;
	      if (TYPE_MODE (TREE_TYPE (arg0)) == DDmode
		  || TYPE_MODE (TREE_TYPE (arg1)) == DDmode
		  || TYPE_MODE (type) == DDmode)
		newtype = dfloat64_type_node;
	      if (TYPE_MODE (TREE_TYPE (arg0)) == TDmode
		  || TYPE_MODE (TREE_TYPE (arg1)) == TDmode
		  || TYPE_MODE (type) == TDmode)
		newtype = dfloat128_type_node;
	      if (newtype == dfloat32_type_node
		  || newtype == dfloat64_type_node
		  || newtype == dfloat128_type_node)
		{
		  expr = build2 (TREE_CODE (expr), newtype,
				 convert_to_real_1 (newtype, arg0,
						    fold_p),
				 convert_to_real_1 (newtype, arg1,
						    fold_p));
		  if (newtype == type)
		    return expr;
		  break;
		}

	      if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (newtype))
		newtype = TREE_TYPE (arg0);
	      if (TYPE_PRECISION (TREE_TYPE (arg1)) > TYPE_PRECISION (newtype))
		newtype = TREE_TYPE (arg1);
	      /* Sometimes this transformation is safe (cannot
		 change results through affecting double rounding
		 cases) and sometimes it is not.  If NEWTYPE is
		 wider than TYPE, e.g. (float)((long double)double
		 + (long double)double) converted to
		 (float)(double + double), the transformation is
		 unsafe regardless of the details of the types
		 involved; double rounding can arise if the result
		 of NEWTYPE arithmetic is a NEWTYPE value half way
		 between two representable TYPE values but the
		 exact value is sufficiently different (in the
		 right direction) for this difference to be
		 visible in ITYPE arithmetic.  If NEWTYPE is the
		 same as TYPE, however, the transformation may be
		 safe depending on the types involved: it is safe
		 if the ITYPE has strictly more than twice as many
		 mantissa bits as TYPE, can represent infinities
		 and NaNs if the TYPE can, and has sufficient
		 exponent range for the product or ratio of two
		 values representable in the TYPE to be within the
		 range of normal values of ITYPE.  */
	      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
		  && (flag_unsafe_math_optimizations
		      || (TYPE_PRECISION (newtype) == TYPE_PRECISION (type)
			  && real_can_shorten_arithmetic (TYPE_MODE (itype),
							  TYPE_MODE (type))
			  && !excess_precision_type (newtype))))
		{
		  expr = build2 (TREE_CODE (expr), newtype,
				 convert_to_real_1 (newtype, arg0,
						    fold_p),
				 convert_to_real_1 (newtype, arg1,
						    fold_p));
		  if (newtype == type)
		    return expr;
		}
	    }
	}
	break;
      default:
	break;
      }

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case REAL_TYPE:
      /* Ignore the conversion if we don't need to store intermediate
	 results and neither type is a decimal float.  */
      return build1_loc (loc,
			 (flag_float_store
			  || DECIMAL_FLOAT_TYPE_P (type)
			  || DECIMAL_FLOAT_TYPE_P (itype))
			 ? CONVERT_EXPR : NOP_EXPR, type, expr);

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return build1 (FLOAT_EXPR, type, expr);

    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      return convert (type,
		      maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
					     TREE_TYPE (TREE_TYPE (expr)),
					     expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      error ("pointer value used where a floating point value was expected");
      return convert_to_real_1 (type, integer_zero_node, fold_p);

    default:
      error ("aggregate value used where a float was expected");
      return convert_to_real_1 (type, integer_zero_node, fold_p);
    }
}

/* A wrapper around convert_to_real_1 that always folds the
   expression.  */

tree
convert_to_real (tree type, tree expr)
{
  return convert_to_real_1 (type, expr, true);
}

/* A wrapper around convert_to_real_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */

tree
convert_to_real_maybe_fold (tree type, tree expr, bool dofold)
{
  tree result
    = convert_to_real_1 (type, expr,
			 dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
  return preserve_any_location_wrapper (result, expr);
}

/* Try to narrow EX_FORM ARG0 ARG1 in narrowed arg types producing a
   result in TYPE.  */
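/* LOC is the location to use for any newly built trees, EXPR is the
   original full-width expression, INPREC and OUTPREC are the precisions
   of EXPR's type and of TYPE, and DOFOLD says whether newly built nodes
   may be folded.  Returns NULL_TREE if the operation cannot be safely
   narrowed.  */
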
static tree
do_narrow (location_t loc,
	   enum tree_code ex_form, tree type, tree arg0, tree arg1,
	   tree expr, unsigned inprec, unsigned outprec, bool dofold)
{
  /* Do the arithmetic in type TYPEX,
     then convert result to TYPE.  */
  tree typex = type;

  /* Can't do arithmetic in enumeral types
     so use an integer type that will hold the values.  */
  if (TREE_CODE (typex) == ENUMERAL_TYPE)
    typex = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
					    TYPE_UNSIGNED (typex));

  /* The type demotion below might cause us to do unsigned arithmetic
     instead of signed, and thus hide overflow bugs.  */
  if ((ex_form == PLUS_EXPR || ex_form == MINUS_EXPR)
      && !TYPE_UNSIGNED (typex)
      && sanitize_flags_p (SANITIZE_SI_OVERFLOW))
    return NULL_TREE;

  /* But now perhaps TYPEX is as wide as INPREC.
     In that case, do nothing special here.
     (Otherwise we would recurse infinitely in convert.)  */
  if (TYPE_PRECISION (typex) != inprec)
    {
      /* Don't do unsigned arithmetic where signed was wanted,
	 or vice versa.
	 Exception: if both of the original operands were
	 unsigned then we can safely do the work as unsigned.
	 Exception: shift operations take their type solely
	 from the first argument.
	 Exception: the LSHIFT_EXPR case above requires that
	 we perform this operation unsigned lest we produce
	 signed-overflow undefinedness.
	 And we may need to do it as unsigned
	 if we truncate to the original size.  */
      if (TYPE_UNSIGNED (TREE_TYPE (expr))
	  || (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      && (TYPE_UNSIGNED (TREE_TYPE (arg1))
		  || ex_form == LSHIFT_EXPR
		  || ex_form == RSHIFT_EXPR
		  || ex_form == LROTATE_EXPR
		  || ex_form == RROTATE_EXPR))
	  || ex_form == LSHIFT_EXPR
	  /* If we have !flag_wrapv, and either ARG0 or
	     ARG1 is of a signed type, we have to do
	     PLUS_EXPR, MINUS_EXPR or MULT_EXPR in an unsigned
	     type in case the operation in outprec precision
	     could overflow.  Otherwise, we would introduce
	     signed-overflow undefinedness.  */
	  || ((!(INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
	       || !(INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		    && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1))))
	      && ((TYPE_PRECISION (TREE_TYPE (arg0)) * 2u
		   > outprec)
		  || (TYPE_PRECISION (TREE_TYPE (arg1)) * 2u
		      > outprec))
	      && (ex_form == PLUS_EXPR
		  || ex_form == MINUS_EXPR
		  || ex_form == MULT_EXPR)))
	{
	  if (!TYPE_UNSIGNED (typex))
	    typex = unsigned_type_for (typex);
	}
      else
	{
	  if (TYPE_UNSIGNED (typex))
	    typex = signed_type_for (typex);
	}
      /* We should do away with all this once we have a proper
	 type promotion/demotion pass, see PR45397.  */
      expr = maybe_fold_build2_loc (dofold, loc, ex_form, typex,
				    convert (typex, arg0),
				    convert (typex, arg1));
      return convert (type, expr);
    }

  return NULL_TREE;
}

/* Convert EXPR to some integer (or enum) type TYPE.

   EXPR must be pointer, integer, discrete (enum, char, or bool), float,
   fixed-point or vector; in other cases error is called.

   If DOFOLD is TRUE, we try to simplify newly-created patterns by folding.

   The result of this is always supposed to be a newly created tree node
   not in use in any existing structure.  */

static tree
convert_to_integer_1 (tree type, tree expr, bool dofold)
{
  enum tree_code ex_form = TREE_CODE (expr);
  tree intype = TREE_TYPE (expr);
  unsigned int inprec = element_precision (intype);
  unsigned int outprec = element_precision (type);
  location_t loc = EXPR_LOCATION (expr);

  /* An INTEGER_TYPE cannot be incomplete, but an ENUMERAL_TYPE can
     be.  Consider `enum E { a, b = (enum E) 3 };'.  */
  if (!COMPLETE_TYPE_P (type))
    {
      error ("conversion to incomplete type");
      return error_mark_node;
    }

  if (ex_form == COMPOUND_EXPR)
    {
      tree t = convert_to_integer_1 (type, TREE_OPERAND (expr, 1), dofold);
      if (t == TREE_OPERAND (expr, 1))
	return expr;
      return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
			 TREE_OPERAND (expr, 0), t);
    }

  /* Convert e.g. (long)round(d) -> lround(d).  */
  /* If we're converting to char, we may encounter differing behavior
     between converting from double->char vs double->long->char.
     We're in "undefined" territory but we prefer to be conservative,
     so only proceed in "unsafe" math mode.  */
  if (optimize
      && (flag_unsafe_math_optimizations
	  || (long_integer_type_node
	      && outprec >= TYPE_PRECISION (long_integer_type_node))))
    {
      tree s_expr = strip_float_extensions (expr);
      tree s_intype = TREE_TYPE (s_expr);
      const enum built_in_function fcode = builtin_mathfn_code (s_expr);
      tree fn = 0;

      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
	  /* Only convert in ISO C99 mode.  */
	  if (!targetm.libc_has_function (function_c99_misc))
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_ICEIL);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LCEIL);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
	  /* Only convert in ISO C99 mode.  */
	  if (!targetm.libc_has_function (function_c99_misc))
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_IFLOOR);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LFLOOR);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
	  /* Only convert in ISO C99 mode and with -fno-math-errno.  */
	  if (!targetm.libc_has_function (function_c99_misc) || flag_errno_math)
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_IROUND);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LROUND);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
	  /* Only convert nearbyint* if we can ignore math exceptions.  */
	  if (flag_trapping_math)
	    break;
	  gcc_fallthrough ();
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
	  /* Only convert in ISO C99 mode and with -fno-math-errno.  */
	  if (!targetm.libc_has_function (function_c99_misc) || flag_errno_math)
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_IRINT);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LRINT);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLRINT);
	  break;

	CASE_FLT_FN (BUILT_IN_TRUNC):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
	  return convert_to_integer_1 (type, CALL_EXPR_ARG (s_expr, 0), dofold);

	default:
	  break;
	}

      if (fn)
	{
	  tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
	  return convert_to_integer_1 (type, newexpr, dofold);
	}
    }

  /* Convert (int)logb(d) -> ilogb(d).  */
  if (optimize
      && flag_unsafe_math_optimizations
      && !flag_trapping_math && !flag_errno_math && flag_finite_math_only
      && integer_type_node
      && (outprec > TYPE_PRECISION (integer_type_node)
	  || (outprec == TYPE_PRECISION (integer_type_node)
	      && !TYPE_UNSIGNED (type))))
    {
      tree s_expr = strip_float_extensions (expr);
      tree s_intype = TREE_TYPE (s_expr);
      const enum built_in_function fcode = builtin_mathfn_code (s_expr);
      tree fn = 0;

      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LOGB):
	  fn = mathfn_built_in (s_intype, BUILT_IN_ILOGB);
	  break;

	default:
	  break;
	}

      if (fn)
	{
	  tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
	  return convert_to_integer_1 (type, newexpr, dofold);
	}
    }

  switch (TREE_CODE (intype))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      if (integer_zerop (expr)
	  && !TREE_OVERFLOW (tree_strip_any_location_wrapper (expr)))
	return build_int_cst (type, 0);

      /* Convert to an unsigned integer of the correct width first, and from
	 there widen/truncate to the required type.  Some targets support the
	 coexistence of multiple valid pointer sizes, so fetch the one we need
	 from the type.  */
      if (!dofold)
	return build1 (CONVERT_EXPR, type, expr);
      expr = fold_build1 (CONVERT_EXPR,
			  lang_hooks.types.type_for_size
			    (TYPE_PRECISION (intype), 0),
			  expr);
      return fold_convert (type, expr);

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      /* If this is a logical operation, which just returns 0 or 1, we can
	 change the type of the expression.  */

      if (TREE_CODE_CLASS (ex_form) == tcc_comparison)
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}

      /* If we are widening the type, put in an explicit conversion.
	 Similarly if we are not changing the width.  After this, we know
	 we are truncating EXPR.  */

      else if (outprec >= inprec)
	{
	  enum tree_code code;

	  /* If the precision of the EXPR's type is K bits and the
	     destination mode has more bits, and the sign is changing,
	     it is not safe to use a NOP_EXPR.  For example, suppose
	     that EXPR's type is a 3-bit unsigned integer type, the
	     TYPE is a 3-bit signed integer type, and the machine mode
	     for the types is 8-bit QImode.  In that case, the
	     conversion necessitates an explicit sign-extension.  In
	     the signed-to-unsigned case the high-order bits have to
	     be cleared.  */
	  if (TYPE_UNSIGNED (type) != TYPE_UNSIGNED (TREE_TYPE (expr))
	      && !type_has_mode_precision_p (TREE_TYPE (expr)))
	    code = CONVERT_EXPR;
	  else
	    code = NOP_EXPR;

	  return maybe_fold_build1_loc (dofold, loc, code, type, expr);
	}

      /* If TYPE is an enumeral type or a type with a precision less
	 than the number of bits in its mode, do the conversion to the
	 type corresponding to its mode, then do a nop conversion
	 to TYPE.  */
      else if (TREE_CODE (type) == ENUMERAL_TYPE
	       || maybe_ne (outprec, GET_MODE_PRECISION (TYPE_MODE (type))))
	{
	  expr
	    = convert_to_integer_1 (lang_hooks.types.type_for_mode
				    (TYPE_MODE (type), TYPE_UNSIGNED (type)),
				    expr, dofold);
	  return maybe_fold_build1_loc (dofold, loc, NOP_EXPR, type, expr);
	}

      /* Here detect when we can distribute the truncation down past some
	 arithmetic.  For example, if adding two longs and converting to an
	 int, we can equally well convert both to ints and then add.
	 For the operations handled here, such truncation distribution
	 is always safe.
	 It is desirable in these cases:
	 1) when truncating down to full-word from a larger size;
	 2) when truncating takes no work;
	 3) when at least one operand of the arithmetic has been extended
	 (as by C's default conversions).  In this case we need two conversions
	 if we do the arithmetic as already requested, so we might as well
	 truncate both and then combine.  Perhaps that way we need only one.

	 Note that in general we cannot do the arithmetic in a type
	 shorter than the desired result of conversion, even if the operands
	 are both extended from a shorter type, because they might overflow
	 if combined in that type.  The exceptions to this--the times when
	 two narrow values can be combined in their narrow type even to
	 make a wider result--are handled by "shorten" in build_binary_op.  */

      if (dofold)
	switch (ex_form)
	  {
	  case RSHIFT_EXPR:
	    /* We can pass truncation down through right shifting
	       when the shift count is a nonpositive constant.  */
	    if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
		&& tree_int_cst_sgn (TREE_OPERAND (expr, 1)) <= 0)
	      goto trunc1;
	    break;

	  case LSHIFT_EXPR:
	    /* We can pass truncation down through left shifting
	       when the shift count is a nonnegative constant and
	       the target type is unsigned.  */
	    if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
		&& tree_int_cst_sgn (TREE_OPERAND (expr, 1)) >= 0
		&& TYPE_UNSIGNED (type)
		&& TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
	      {
		/* If shift count is less than the width of the truncated type,
		   really shift.  */
		if (tree_int_cst_lt (TREE_OPERAND (expr, 1), TYPE_SIZE (type)))
		  /* In this case, shifting is like multiplication.  */
		  goto trunc1;
		else
		  {
		    /* If it is >= that width, result is zero.
		       Handling this with trunc1 would give the wrong result:
		       (int) ((long long) a << 32) is well defined (as 0)
		       but (int) a << 32 is undefined and would get a
		       warning.  */

		    tree t = build_int_cst (type, 0);

		    /* If the original expression had side-effects, we must
		       preserve it.  */
		    if (TREE_SIDE_EFFECTS (expr))
		      return build2 (COMPOUND_EXPR, type, expr, t);
		    else
		      return t;
		  }
	      }
	    break;

	  case TRUNC_DIV_EXPR:
	    {
	      tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), NULL_TREE);
	      tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), NULL_TREE);

	      /* Don't distribute unless the output precision is at least as
		 big as the actual inputs and it has the same signedness.  */
	      if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
		  && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
		  /* If signedness of arg0 and arg1 don't match,
		     we can't necessarily find a type to compare them in.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
		      == TYPE_UNSIGNED (TREE_TYPE (arg1)))
		  /* Do not change the sign of the division.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (expr))
		      == TYPE_UNSIGNED (TREE_TYPE (arg0)))
		  /* Either require unsigned division or a division by
		     a constant that is not -1.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
		      || (TREE_CODE (arg1) == INTEGER_CST
			  && !integer_all_onesp (arg1))))
		{
		  tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
					expr, inprec, outprec, dofold);
		  if (tem)
		    return tem;
		}
	      break;
	    }

	  case MAX_EXPR:
	  case MIN_EXPR:
	  case MULT_EXPR:
	    {
	      tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
	      tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

	      /* Don't distribute unless the output precision is at least as
		 big as the actual inputs.  Otherwise, the comparison of the
		 truncated values will be wrong.  */
	      if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
		  && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
		  /* If signedness of arg0 and arg1 don't match,
		     we can't necessarily find a type to compare them in.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
		      == TYPE_UNSIGNED (TREE_TYPE (arg1))))
		goto trunc1;
	      break;
	    }

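	  /* The cases below share the trunc1 logic (also reached by goto
	     from the shift and MIN/MAX/MULT cases above): both operands are
	     unwidened and, when profitable, the operation is redone in the
	     narrower type via do_narrow.  */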
	  case PLUS_EXPR:
	  case MINUS_EXPR:
	  case BIT_AND_EXPR:
	  case BIT_IOR_EXPR:
	  case BIT_XOR_EXPR:
	  trunc1:
	    {
	      tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
	      tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

	      /* Do not try to narrow operands of pointer subtraction;
		 that will interfere with other folding.  */
	      if (ex_form == MINUS_EXPR
		  && CONVERT_EXPR_P (arg0)
		  && CONVERT_EXPR_P (arg1)
		  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
		  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
		break;

	      if (outprec >= BITS_PER_WORD
		  || targetm.truly_noop_truncation (outprec, inprec)
		  || inprec > TYPE_PRECISION (TREE_TYPE (arg0))
		  || inprec > TYPE_PRECISION (TREE_TYPE (arg1)))
		{
		  tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
					expr, inprec, outprec, dofold);
		  if (tem)
		    return tem;
		}
	    }
	    break;

	  case NEGATE_EXPR:
	    /* Using unsigned arithmetic for signed types may hide overflow
	       bugs.  */
	    if (!TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (expr, 0)))
		&& sanitize_flags_p (SANITIZE_SI_OVERFLOW))
	      break;
	    /* Fall through.  */
	  case BIT_NOT_EXPR:
	    /* This is not correct for ABS_EXPR,
	       since we must test the sign before truncation.  */
	    {
	      /* Do the arithmetic in type TYPEX,
		 then convert result to TYPE.  */
	      tree typex = type;

	      /* Can't do arithmetic in enumeral types
		 so use an integer type that will hold the values.  */
	      if (TREE_CODE (typex) == ENUMERAL_TYPE)
		typex
		  = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
						    TYPE_UNSIGNED (typex));

	      if (!TYPE_UNSIGNED (typex))
		typex = unsigned_type_for (typex);
	      return convert (type,
			      fold_build1 (ex_form, typex,
					   convert (typex,
						    TREE_OPERAND (expr, 0))));
	    }

	  CASE_CONVERT:
	    {
	      tree argtype = TREE_TYPE (TREE_OPERAND (expr, 0));
	      /* Don't introduce a "can't convert between vector values
		 of different size" error.  */
	      if (TREE_CODE (argtype) == VECTOR_TYPE
		  && maybe_ne (GET_MODE_SIZE (TYPE_MODE (argtype)),
			       GET_MODE_SIZE (TYPE_MODE (type))))
		break;
	    }
	    /* If truncating after truncating, might as well do all at once.
	       If truncating after extending, we may get rid of wasted work.  */
	    return convert (type, get_unwidened (TREE_OPERAND (expr, 0), type));

	  case COND_EXPR:
	    /* It is sometimes worthwhile to push the narrowing down through
	       the conditional; it never loses.  A COND_EXPR may have a throw
	       as one operand, which then has void type.  Just leave void
	       operands as they are.  */
	    return
	      fold_build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
			   VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1)))
			   ? TREE_OPERAND (expr, 1)
			   : convert (type, TREE_OPERAND (expr, 1)),
			   VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 2)))
			   ? TREE_OPERAND (expr, 2)
			   : convert (type, TREE_OPERAND (expr, 2)));

	  default:
	    break;
	  }

      /* When parsing long initializers, we might end up with a lot of casts.
	 Shortcut this.  */
      if (TREE_CODE (tree_strip_any_location_wrapper (expr)) == INTEGER_CST)
	return fold_convert (type, expr);
      return build1 (CONVERT_EXPR, type, expr);

    case REAL_TYPE:
      if (sanitize_flags_p (SANITIZE_FLOAT_CAST)
	  && current_function_decl != NULL_TREE)
	{
	  expr = save_expr (expr);
	  tree check = ubsan_instrument_float_cast (loc, type, expr);
	  expr = build1 (FIX_TRUNC_EXPR, type, expr);
	  if (check == NULL_TREE)
	    return expr;
	  return maybe_fold_build2_loc (dofold, loc, COMPOUND_EXPR,
					TREE_TYPE (expr), check, expr);
	}
      else
	return build1 (FIX_TRUNC_EXPR, type, expr);

    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      expr = maybe_fold_build1_loc (dofold, loc, REALPART_EXPR,
				    TREE_TYPE (TREE_TYPE (expr)), expr);
      return convert (type, expr);

    case VECTOR_TYPE:
      if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
	{
	  error ("can%'t convert a vector of type %qT"
		 " to type %qT which has different size",
		 TREE_TYPE (expr), type);
	  return error_mark_node;
	}
      return build1 (VIEW_CONVERT_EXPR, type, expr);

    default:
      error ("aggregate value used where an integer was expected");
      return convert (type, integer_zero_node);
    }
}

/* Convert EXPR to some integer (or enum) type TYPE.

   EXPR must be pointer, integer, discrete (enum, char, or bool), float,
   fixed-point or vector; in other cases error is called.

   The result of this is always supposed to be a newly created tree node
   not in use in any existing structure.  */

tree
convert_to_integer (tree type, tree expr)
{
  return convert_to_integer_1 (type, expr, true);
}

/* A wrapper around convert_to_integer_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */

tree
convert_to_integer_maybe_fold (tree type, tree expr, bool dofold)
{
  tree result
    = convert_to_integer_1 (type, expr,
			    dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
  return preserve_any_location_wrapper (result, expr);
}

/* Convert EXPR to the complex type TYPE in the usual ways.  If FOLD_P is
   true, try to fold the expression.  */

static tree
convert_to_complex_1 (tree type, tree expr, bool fold_p)
{
  location_t loc = EXPR_LOCATION (expr);
  tree subtype = TREE_TYPE (type);

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return build2 (COMPLEX_EXPR, type, convert (subtype, expr),
		     convert (subtype, integer_zero_node));

    case COMPLEX_TYPE:
      {
	tree elt_type = TREE_TYPE (TREE_TYPE (expr));

	if (TYPE_MAIN_VARIANT (elt_type) == TYPE_MAIN_VARIANT (subtype))
	  return expr;
	else if (TREE_CODE (expr) == COMPOUND_EXPR)
	  {
	    tree t = convert_to_complex_1 (type, TREE_OPERAND (expr, 1),
					   fold_p);
	    if (t == TREE_OPERAND (expr, 1))
	      return expr;
	    return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR,
			       TREE_TYPE (t), TREE_OPERAND (expr, 0), t);
	  }
	else if (TREE_CODE (expr) == COMPLEX_EXPR)
	  return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
					convert (subtype,
						 TREE_OPERAND (expr, 0)),
					convert (subtype,
						 TREE_OPERAND (expr, 1)));
	else
	  {
	    expr = save_expr (expr);
	    tree realp = maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
						TREE_TYPE (TREE_TYPE (expr)),
						expr);
	    tree imagp = maybe_fold_build1_loc (fold_p, loc, IMAGPART_EXPR,
						TREE_TYPE (TREE_TYPE (expr)),
						expr);
	    return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
					  convert (subtype, realp),
					  convert (subtype, imagp));
	  }
      }

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      error ("pointer value used where a complex was expected");
      return convert_to_complex_1 (type, integer_zero_node, fold_p);

    default:
      error ("aggregate value used where a complex was expected");
      return convert_to_complex_1 (type, integer_zero_node, fold_p);
    }
}

/* A wrapper around convert_to_complex_1 that always folds the
   expression.  */

tree
convert_to_complex (tree type, tree expr)
{
  return convert_to_complex_1 (type, expr, true);
}

/* A wrapper around convert_to_complex_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */

tree
convert_to_complex_maybe_fold (tree type, tree expr, bool dofold)
{
  tree result
    = convert_to_complex_1 (type, expr,
			    dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
  return preserve_any_location_wrapper (result, expr);
}

/* Convert EXPR to the vector type TYPE in the usual ways.  */

tree
convert_to_vector (tree type, tree expr)
{
  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case INTEGER_TYPE:
    case VECTOR_TYPE:
      if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
	{
	  error ("can%'t convert a value of type %qT"
		 " to vector type %qT which has different size",
		 TREE_TYPE (expr), type);
	  return error_mark_node;
	}
      return build1 (VIEW_CONVERT_EXPR, type, expr);

    default:
      error ("can%'t convert value to a vector");
      return error_mark_node;
    }
}

/* Convert EXPR to some fixed-point type TYPE.

   EXPR must be fixed-point, float, integer, or enumeral;
   in other cases error is called.  */

tree
convert_to_fixed (tree type, tree expr)
{
  if (integer_zerop (expr))
    {
      tree fixed_zero_node = build_fixed (type, FCONST0 (TYPE_MODE (type)));
      return fixed_zero_node;
    }
  else if (integer_onep (expr) && ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)))
    {
      tree fixed_one_node = build_fixed (type, FCONST1 (TYPE_MODE (type)));
      return fixed_one_node;
    }

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case FIXED_POINT_TYPE:
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      return convert (type,
		      fold_build1 (REALPART_EXPR,
				   TREE_TYPE (TREE_TYPE (expr)), expr));

    default:
      error ("aggregate value used where a fixed-point was expected");
      return error_mark_node;
    }
}

#if CHECKING_P

namespace selftest {

/* Selftests for conversions.  */

static void
test_convert_to_integer_maybe_fold (tree orig_type, tree new_type)
{
  /* Calling convert_to_integer_maybe_fold on an INTEGER_CST.  */

  tree orig_cst = build_int_cst (orig_type, 42);

  /* Verify that convert_to_integer_maybe_fold on a constant returns a new
     constant of the new type, unless the types are the same, in which
     case verify it's a no-op.  */
  {
    tree result = convert_to_integer_maybe_fold (new_type,
						 orig_cst, false);
    if (orig_type != new_type)
      {
	ASSERT_EQ (TREE_TYPE (result), new_type);
	ASSERT_EQ (TREE_CODE (result), INTEGER_CST);
      }
    else
      ASSERT_EQ (result, orig_cst);
  }

  /* Calling convert_to_integer_maybe_fold on a location wrapper around
     an INTEGER_CST.

     Verify that convert_to_integer_maybe_fold on a location wrapper
     around a constant returns a new location wrapper around an equivalent
     constant, both of the new type, unless the types are the same,
     in which case the original wrapper should be returned.  */
  {
    const location_t loc = BUILTINS_LOCATION;
    tree wrapped_orig_cst = maybe_wrap_with_location (orig_cst, loc);
    tree result
      = convert_to_integer_maybe_fold (new_type, wrapped_orig_cst, false);
    ASSERT_EQ (TREE_TYPE (result), new_type);
    ASSERT_EQ (EXPR_LOCATION (result), loc);
    ASSERT_TRUE (location_wrapper_p (result));
    ASSERT_EQ (TREE_TYPE (TREE_OPERAND (result, 0)), new_type);
    ASSERT_EQ (TREE_CODE (TREE_OPERAND (result, 0)), INTEGER_CST);

    if (orig_type == new_type)
      ASSERT_EQ (result, wrapped_orig_cst);
  }
}

/* Verify that convert_to_integer_maybe_fold preserves locations.  */

static void
test_convert_to_integer_maybe_fold ()
{
  /* char -> long.  */
  test_convert_to_integer_maybe_fold (char_type_node, long_integer_type_node);

  /* char -> char.  */
  test_convert_to_integer_maybe_fold (char_type_node, char_type_node);

  /* long -> char.  */
  test_convert_to_integer_maybe_fold (long_integer_type_node, char_type_node);

  /* long -> long.  */
  test_convert_to_integer_maybe_fold (long_integer_type_node,
				      long_integer_type_node);
}

/* Run all of the selftests within this file.  */

void
convert_c_tests ()
{
  test_convert_to_integer_maybe_fold ();
}

} // namespace selftest

#endif /* CHECKING_P */