/* Utility routines for data type conversion for GCC.
   Copyright (C) 1987-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* These routines are somewhat language-independent utility functions
   intended to be called by the language-specific convert () functions.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "convert.h"
#include "langhooks.h"
#include "builtins.h"
#include "ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
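
/* Build a unary or binary tree node at LOC, folding it when FOLD_P is
   true and building it unfolded otherwise.  */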
#define maybe_fold_build1_loc(FOLD_P, LOC, CODE, TYPE, EXPR) \
  ((FOLD_P) ? fold_build1_loc (LOC, CODE, TYPE, EXPR) \
   : build1_loc (LOC, CODE, TYPE, EXPR))
#define maybe_fold_build2_loc(FOLD_P, LOC, CODE, TYPE, EXPR1, EXPR2) \
  ((FOLD_P) ? fold_build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2) \
   : build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2))

/* Convert EXPR to some pointer or reference type TYPE.
   EXPR must be pointer, reference, integer, enumeral, or literal zero;
   in other cases error is called.  If FOLD_P is true, try to fold the
   expression.  */

static tree
convert_to_pointer_1 (tree type, tree expr, bool fold_p)
{
  location_t loc = EXPR_LOCATION (expr);
  if (TREE_TYPE (expr) == type)
    return expr;

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
        /* If the pointers point to different address spaces, the conversion
           needs to be done via an ADDR_SPACE_CONVERT_EXPR instead of a
           NOP_EXPR.  */
        addr_space_t to_as = TYPE_ADDR_SPACE (TREE_TYPE (type));
        addr_space_t from_as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));

        if (to_as == from_as)
          return maybe_fold_build1_loc (fold_p, loc, NOP_EXPR, type, expr);
        else
          return maybe_fold_build1_loc (fold_p, loc, ADDR_SPACE_CONVERT_EXPR,
                                        type, expr);
      }

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      {
        /* If the input precision differs from the target pointer type
           precision, first convert the input expression to an integer type of
           the target precision.  Some targets, e.g. VMS, need several pointer
           sizes to coexist so the latter isn't necessarily POINTER_SIZE.  */
        unsigned int pprec = TYPE_PRECISION (type);
        unsigned int eprec = TYPE_PRECISION (TREE_TYPE (expr));

        if (eprec != pprec)
          expr
            = maybe_fold_build1_loc (fold_p, loc, NOP_EXPR,
                                     lang_hooks.types.type_for_size (pprec, 0),
                                     expr);
      }
      return maybe_fold_build1_loc (fold_p, loc, CONVERT_EXPR, type, expr);

    default:
      error ("cannot convert to a pointer type");
      return convert_to_pointer_1 (type, integer_zero_node, fold_p);
    }
}

/* A wrapper around convert_to_pointer_1 that always folds the
   expression.  */

tree
convert_to_pointer (tree type, tree expr)
{
  return convert_to_pointer_1 (type, expr, true);
}

/* A wrapper around convert_to_pointer_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_P.  */

tree
convert_to_pointer_maybe_fold (tree type, tree expr, bool dofold)
{
  return convert_to_pointer_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
}

/* Convert EXPR to some floating-point type TYPE.

   EXPR must be float, fixed-point, integer, or enumeral;
   in other cases error is called.  If FOLD_P is true, try to fold
   the expression.  */

static tree
convert_to_real_1 (tree type, tree expr, bool fold_p)
{
  enum built_in_function fcode = builtin_mathfn_code (expr);
  tree itype = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == COMPOUND_EXPR)
    {
      tree t = convert_to_real_1 (type, TREE_OPERAND (expr, 1), fold_p);
      if (t == TREE_OPERAND (expr, 1))
        return expr;
      return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
                         TREE_OPERAND (expr, 0), t);
    }

  /* Disable until we figure out how to decide whether the functions are
     present in the runtime.  */
  /* Convert (float)sqrt((double)x) where x is float into sqrtf(x).  */
  if (optimize
      && (TYPE_MODE (type) == TYPE_MODE (double_type_node)
          || TYPE_MODE (type) == TYPE_MODE (float_type_node)))
    {
      switch (fcode)
        {
#define CASE_MATHFN(FN) case BUILT_IN_##FN: case BUILT_IN_##FN##L:
        CASE_MATHFN (COSH)
        CASE_MATHFN (EXP)
        CASE_MATHFN (EXP10)
        CASE_MATHFN (EXP2)
        CASE_MATHFN (EXPM1)
        CASE_MATHFN (GAMMA)
        CASE_MATHFN (J0)
        CASE_MATHFN (J1)
        CASE_MATHFN (LGAMMA)
        CASE_MATHFN (POW10)
        CASE_MATHFN (SINH)
        CASE_MATHFN (TGAMMA)
        CASE_MATHFN (Y0)
        CASE_MATHFN (Y1)
          /* The above functions may set errno differently with float
             input or output so this transformation is not safe with
             -fmath-errno.  */
          if (flag_errno_math)
            break;
          gcc_fallthrough ();
        CASE_MATHFN (ACOS)
        CASE_MATHFN (ACOSH)
        CASE_MATHFN (ASIN)
        CASE_MATHFN (ASINH)
        CASE_MATHFN (ATAN)
        CASE_MATHFN (ATANH)
        CASE_MATHFN (CBRT)
        CASE_MATHFN (COS)
        CASE_MATHFN (ERF)
        CASE_MATHFN (ERFC)
        CASE_MATHFN (LOG)
        CASE_MATHFN (LOG10)
        CASE_MATHFN (LOG2)
        CASE_MATHFN (LOG1P)
        CASE_MATHFN (SIN)
        CASE_MATHFN (TAN)
        CASE_MATHFN (TANH)
          /* The above functions are not safe to do this conversion.  */
          if (!flag_unsafe_math_optimizations)
            break;
          gcc_fallthrough ();
        CASE_MATHFN (SQRT)
        CASE_MATHFN (FABS)
        CASE_MATHFN (LOGB)
#undef CASE_MATHFN
          {
            tree arg0 = strip_float_extensions (CALL_EXPR_ARG (expr, 0));
            tree newtype = type;

            /* We have (outertype)sqrt((innertype)x).  Choose the wider mode
               of the two as the safe type for the operation.  */
            if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (type))
              newtype = TREE_TYPE (arg0);

            /* We consider converting

                 (T1) sqrtT2 ((T2) exprT3)
               to
                 (T1) sqrtT4 ((T4) exprT3)

               where T1 is TYPE, T2 is ITYPE, T3 is TREE_TYPE (ARG0),
               and T4 is NEWTYPE.  All these types are floating-point types.
               T4 (NEWTYPE) should be narrower than T2 (ITYPE).  This
               conversion is safe only if P1 >= P2*2+2, where P1 and P2 are
               the precisions of T2 and T4.  See the following URL for a
               reference:
               http://stackoverflow.com/questions/9235456/determining-
               floating-point-square-root  */
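
            /* As a concrete illustration: IEEE binary32 (float) has a 24-bit
               significand and IEEE binary64 (double) a 53-bit one, and
               53 >= 24*2+2, which is why (float) sqrt ((double) f), where f
               is a float, can be computed as sqrtf (f).  */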
            if ((fcode == BUILT_IN_SQRT || fcode == BUILT_IN_SQRTL)
                && !flag_unsafe_math_optimizations)
              {
                /* The following conversion is unsafe even if the precision
                   condition below is satisfied:

                   (float) sqrtl ((long double) double_val)
                     -> (float) sqrt (double_val)  */
                if (TYPE_MODE (type) != TYPE_MODE (newtype))
                  break;

                int p1 = REAL_MODE_FORMAT (TYPE_MODE (itype))->p;
                int p2 = REAL_MODE_FORMAT (TYPE_MODE (newtype))->p;
                if (p1 < p2 * 2 + 2)
                  break;
              }

            /* Be careful about integer to fp conversions.
               These may overflow still.  */
            if (FLOAT_TYPE_P (TREE_TYPE (arg0))
                && TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
                && (TYPE_MODE (newtype) == TYPE_MODE (double_type_node)
                    || TYPE_MODE (newtype) == TYPE_MODE (float_type_node)))
              {
                tree fn = mathfn_built_in (newtype, fcode);
                if (fn)
                  {
                    tree arg = convert_to_real_1 (newtype, arg0, fold_p);
                    expr = build_call_expr (fn, 1, arg);
                    if (newtype == type)
                      return expr;
                  }
              }
          }
        default:
          break;
        }
    }

  /* Propagate the cast into the operation.  */
  if (itype != type && FLOAT_TYPE_P (type))
    switch (TREE_CODE (expr))
      {
      /* Convert (float)-x into -(float)x.  This is safe for
         round-to-nearest rounding mode when the inner type is float.  */
      case ABS_EXPR:
      case NEGATE_EXPR:
        if (!flag_rounding_math
            && FLOAT_TYPE_P (itype)
            && TYPE_PRECISION (type) < TYPE_PRECISION (itype))
          {
            tree arg = convert_to_real_1 (type, TREE_OPERAND (expr, 0),
                                          fold_p);
            return build1 (TREE_CODE (expr), type, arg);
          }
        break;

      /* Convert (outertype)((innertype0)a+(innertype1)b)
         into ((newtype)a+(newtype)b) where newtype
         is the widest mode from all of these.  */
      case PLUS_EXPR:
      case MINUS_EXPR:
      case MULT_EXPR:
      case RDIV_EXPR:
        {
          tree arg0 = strip_float_extensions (TREE_OPERAND (expr, 0));
          tree arg1 = strip_float_extensions (TREE_OPERAND (expr, 1));

          if (FLOAT_TYPE_P (TREE_TYPE (arg0))
              && FLOAT_TYPE_P (TREE_TYPE (arg1))
              && DECIMAL_FLOAT_TYPE_P (itype) == DECIMAL_FLOAT_TYPE_P (type))
            {
              tree newtype = type;
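
              /* SDmode, DDmode and TDmode are the modes of _Decimal32,
                 _Decimal64 and _Decimal128; the chain below picks the widest
                 decimal type involved in the operands or the target type.  */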
              if (TYPE_MODE (TREE_TYPE (arg0)) == SDmode
                  || TYPE_MODE (TREE_TYPE (arg1)) == SDmode
                  || TYPE_MODE (type) == SDmode)
                newtype = dfloat32_type_node;
              if (TYPE_MODE (TREE_TYPE (arg0)) == DDmode
                  || TYPE_MODE (TREE_TYPE (arg1)) == DDmode
                  || TYPE_MODE (type) == DDmode)
                newtype = dfloat64_type_node;
              if (TYPE_MODE (TREE_TYPE (arg0)) == TDmode
                  || TYPE_MODE (TREE_TYPE (arg1)) == TDmode
                  || TYPE_MODE (type) == TDmode)
                newtype = dfloat128_type_node;
              if (newtype == dfloat32_type_node
                  || newtype == dfloat64_type_node
                  || newtype == dfloat128_type_node)
                {
                  expr = build2 (TREE_CODE (expr), newtype,
                                 convert_to_real_1 (newtype, arg0, fold_p),
                                 convert_to_real_1 (newtype, arg1, fold_p));
                  if (newtype == type)
                    return expr;
                  break;
                }

              if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (newtype))
                newtype = TREE_TYPE (arg0);
              if (TYPE_PRECISION (TREE_TYPE (arg1)) > TYPE_PRECISION (newtype))
                newtype = TREE_TYPE (arg1);
              /* Sometimes this transformation is safe (cannot
                 change results through affecting double rounding
                 cases) and sometimes it is not.  If NEWTYPE is
                 wider than TYPE, e.g. (float)((long double)double
                 + (long double)double) converted to
                 (float)(double + double), the transformation is
                 unsafe regardless of the details of the types
                 involved; double rounding can arise if the result
                 of NEWTYPE arithmetic is a NEWTYPE value half way
                 between two representable TYPE values but the
                 exact value is sufficiently different (in the
                 right direction) for this difference to be
                 visible in ITYPE arithmetic.  If NEWTYPE is the
                 same as TYPE, however, the transformation may be
                 safe depending on the types involved: it is safe
                 if the ITYPE has strictly more than twice as many
                 mantissa bits as TYPE, can represent infinities
                 and NaNs if the TYPE can, and has sufficient
                 exponent range for the product or ratio of two
                 values representable in the TYPE to be within the
                 range of normal values of ITYPE.  */
              if (TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
                  && (flag_unsafe_math_optimizations
                      || (TYPE_PRECISION (newtype) == TYPE_PRECISION (type)
                          && real_can_shorten_arithmetic (TYPE_MODE (itype),
                                                          TYPE_MODE (type))
                          && !excess_precision_type (newtype))))
                {
                  expr = build2 (TREE_CODE (expr), newtype,
                                 convert_to_real_1 (newtype, arg0, fold_p),
                                 convert_to_real_1 (newtype, arg1, fold_p));
                  if (newtype == type)
                    return expr;
                }
            }
        }
        break;

      default:
        break;
      }

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case REAL_TYPE:
      /* Ignore the conversion if we don't need to store intermediate
         results and neither type is a decimal float.  */
      return build1_loc (loc,
                         (flag_float_store
                          || DECIMAL_FLOAT_TYPE_P (type)
                          || DECIMAL_FLOAT_TYPE_P (itype))
                         ? CONVERT_EXPR : NOP_EXPR, type, expr);

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return build1 (FLOAT_EXPR, type, expr);

    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      return convert (type,
                      maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
                                             TREE_TYPE (TREE_TYPE (expr)),
                                             expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      error ("pointer value used where a floating point value was expected");
      return convert_to_real_1 (type, integer_zero_node, fold_p);

    default:
      error ("aggregate value used where a float was expected");
      return convert_to_real_1 (type, integer_zero_node, fold_p);
    }
}

/* A wrapper around convert_to_real_1 that always folds the
   expression.  */

tree
convert_to_real (tree type, tree expr)
{
  return convert_to_real_1 (type, expr, true);
}

/* A wrapper around convert_to_real_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_P.  */

tree
convert_to_real_maybe_fold (tree type, tree expr, bool dofold)
{
  return convert_to_real_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
}

/* Try to narrow EX_FORM ARG0 ARG1 in narrowed arg types producing a
   result in TYPE.  */
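/* For instance, (short) ((int) s0 + (int) s1) can have the addition done
   directly in a 16-bit type (made unsigned when needed so that no signed
   overflow is introduced) and only the sum converted to short.  */
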
static tree
do_narrow (location_t loc,
           enum tree_code ex_form, tree type, tree arg0, tree arg1,
           tree expr, unsigned inprec, unsigned outprec, bool dofold)
{
  /* Do the arithmetic in type TYPEX,
     then convert the result to TYPE.  */
  tree typex = type;

  /* Can't do arithmetic in enumeral types
     so use an integer type that will hold the values.  */
  if (TREE_CODE (typex) == ENUMERAL_TYPE)
    typex = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
                                            TYPE_UNSIGNED (typex));

  /* The type demotion below might cause doing unsigned arithmetic
     instead of signed, and thus hide overflow bugs.  */
  if ((ex_form == PLUS_EXPR || ex_form == MINUS_EXPR)
      && !TYPE_UNSIGNED (typex)
      && sanitize_flags_p (SANITIZE_SI_OVERFLOW))
    return NULL_TREE;

  /* But now perhaps TYPEX is as wide as INPREC.
     In that case, do nothing special here.
     (Otherwise we would recurse infinitely in convert.)  */
  if (TYPE_PRECISION (typex) != inprec)
    {
      /* Don't do unsigned arithmetic where signed was wanted,
         or vice versa.
         Exception: if both of the original operands were
         unsigned then we can safely do the work as unsigned.
         Exception: shift operations take their type solely
         from the first argument.
         Exception: the LSHIFT_EXPR case above requires that
         we perform this operation unsigned lest we produce
         signed-overflow undefinedness.
         And we may need to do it as unsigned
         if we truncate to the original size.  */
      if (TYPE_UNSIGNED (TREE_TYPE (expr))
          || (TYPE_UNSIGNED (TREE_TYPE (arg0))
              && (TYPE_UNSIGNED (TREE_TYPE (arg1))
                  || ex_form == LSHIFT_EXPR
                  || ex_form == RSHIFT_EXPR
                  || ex_form == LROTATE_EXPR
                  || ex_form == RROTATE_EXPR))
          || ex_form == LSHIFT_EXPR
          /* If we have !flag_wrapv, and either ARG0 or
             ARG1 is of a signed type, we have to do
             PLUS_EXPR, MINUS_EXPR or MULT_EXPR in an unsigned
             type in case the operation in outprec precision
             could overflow.  Otherwise, we would introduce
             signed-overflow undefinedness.  */
          || ((!TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
               || !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
              && ((TYPE_PRECISION (TREE_TYPE (arg0)) * 2u > outprec)
                  || (TYPE_PRECISION (TREE_TYPE (arg1)) * 2u > outprec))
              && (ex_form == PLUS_EXPR
                  || ex_form == MINUS_EXPR
                  || ex_form == MULT_EXPR)))
        {
          if (!TYPE_UNSIGNED (typex))
            typex = unsigned_type_for (typex);
        }
      else
        {
          if (TYPE_UNSIGNED (typex))
            typex = signed_type_for (typex);
        }
      /* We should do away with all this once we have a proper
         type promotion/demotion pass, see PR45397.  */
      expr = maybe_fold_build2_loc (dofold, loc, ex_form, typex,
                                    convert (typex, arg0),
                                    convert (typex, arg1));
      return convert (type, expr);
    }

  return NULL_TREE;
}

/* Convert EXPR to some integer (or enum) type TYPE.

   EXPR must be pointer, integer, discrete (enum, char, or bool), float,
   fixed-point or vector; in other cases error is called.

   If DOFOLD is TRUE, we try to simplify newly-created patterns by folding.

   The result of this is always supposed to be a newly created tree node
   not in use in any existing structure.  */

static tree
convert_to_integer_1 (tree type, tree expr, bool dofold)
{
  enum tree_code ex_form = TREE_CODE (expr);
  tree intype = TREE_TYPE (expr);
  unsigned int inprec = element_precision (intype);
  unsigned int outprec = element_precision (type);
  location_t loc = EXPR_LOCATION (expr);

  /* An INTEGER_TYPE cannot be incomplete, but an ENUMERAL_TYPE can
     be.  Consider `enum E { a, b = (enum E) 3 };'.  */
  if (!COMPLETE_TYPE_P (type))
    {
      error ("conversion to incomplete type");
      return error_mark_node;
    }

  if (ex_form == COMPOUND_EXPR)
    {
      tree t = convert_to_integer_1 (type, TREE_OPERAND (expr, 1), dofold);
      if (t == TREE_OPERAND (expr, 1))
        return expr;
      return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
                         TREE_OPERAND (expr, 0), t);
    }

  /* Convert e.g. (long)round(d) -> lround(d).  */
  /* If we're converting to char, we may encounter differing behavior
     between converting from double->char vs double->long->char.
     We're in "undefined" territory but we prefer to be conservative,
     so only proceed in "unsafe" math mode.  */
  if (optimize
      && (flag_unsafe_math_optimizations
          || (long_integer_type_node
              && outprec >= TYPE_PRECISION (long_integer_type_node))))
    {
      tree s_expr = strip_float_extensions (expr);
      tree s_intype = TREE_TYPE (s_expr);
      const enum built_in_function fcode = builtin_mathfn_code (s_expr);
      tree fn = 0;

      switch (fcode)
        {
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
          /* Only convert in ISO C99 mode.  */
          if (!targetm.libc_has_function (function_c99_misc))
            break;
          if (outprec < TYPE_PRECISION (integer_type_node)
              || (outprec == TYPE_PRECISION (integer_type_node)
                  && !TYPE_UNSIGNED (type)))
            fn = mathfn_built_in (s_intype, BUILT_IN_ICEIL);
          else if (outprec == TYPE_PRECISION (long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LCEIL);
          else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LLCEIL);
          break;

        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
          /* Only convert in ISO C99 mode.  */
          if (!targetm.libc_has_function (function_c99_misc))
            break;
          if (outprec < TYPE_PRECISION (integer_type_node)
              || (outprec == TYPE_PRECISION (integer_type_node)
                  && !TYPE_UNSIGNED (type)))
            fn = mathfn_built_in (s_intype, BUILT_IN_IFLOOR);
          else if (outprec == TYPE_PRECISION (long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LFLOOR);
          else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LLFLOOR);
          break;

        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
          /* Only convert in ISO C99 mode and with -fno-math-errno.  */
          if (!targetm.libc_has_function (function_c99_misc)
              || flag_errno_math)
            break;
          if (outprec < TYPE_PRECISION (integer_type_node)
              || (outprec == TYPE_PRECISION (integer_type_node)
                  && !TYPE_UNSIGNED (type)))
            fn = mathfn_built_in (s_intype, BUILT_IN_IROUND);
          else if (outprec == TYPE_PRECISION (long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LROUND);
          else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LLROUND);
          break;

        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
          /* Only convert nearbyint* if we can ignore math exceptions.  */
          if (flag_trapping_math)
            break;
          gcc_fallthrough ();
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
          /* Only convert in ISO C99 mode and with -fno-math-errno.  */
          if (!targetm.libc_has_function (function_c99_misc)
              || flag_errno_math)
            break;
          if (outprec < TYPE_PRECISION (integer_type_node)
              || (outprec == TYPE_PRECISION (integer_type_node)
                  && !TYPE_UNSIGNED (type)))
            fn = mathfn_built_in (s_intype, BUILT_IN_IRINT);
          else if (outprec == TYPE_PRECISION (long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LRINT);
          else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
                   && !TYPE_UNSIGNED (type))
            fn = mathfn_built_in (s_intype, BUILT_IN_LLRINT);
          break;

        CASE_FLT_FN (BUILT_IN_TRUNC):
        CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
          return convert_to_integer_1 (type, CALL_EXPR_ARG (s_expr, 0),
                                       dofold);

        default:
          break;
        }

      if (fn)
        {
          tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
          return convert_to_integer_1 (type, newexpr, dofold);
        }
    }

  /* Convert (int)logb(d) -> ilogb(d).  */
  if (optimize
      && flag_unsafe_math_optimizations
      && !flag_trapping_math && !flag_errno_math && flag_finite_math_only
      && integer_type_node
      && (outprec > TYPE_PRECISION (integer_type_node)
          || (outprec == TYPE_PRECISION (integer_type_node)
              && !TYPE_UNSIGNED (type))))
    {
      tree s_expr = strip_float_extensions (expr);
      tree s_intype = TREE_TYPE (s_expr);
      const enum built_in_function fcode = builtin_mathfn_code (s_expr);
      tree fn = 0;

      switch (fcode)
        {
        CASE_FLT_FN (BUILT_IN_LOGB):
          fn = mathfn_built_in (s_intype, BUILT_IN_ILOGB);
          break;

        default:
          break;
        }

      if (fn)
        {
          tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
          return convert_to_integer_1 (type, newexpr, dofold);
        }
    }

  switch (TREE_CODE (intype))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      if (integer_zerop (expr) && !TREE_OVERFLOW (expr))
        return build_int_cst (type, 0);

      /* Convert to an unsigned integer of the correct width first, and from
         there widen/truncate to the required type.  Some targets support the
         coexistence of multiple valid pointer sizes, so fetch the one we need
         from the type.  */
      if (!dofold)
        return build1 (CONVERT_EXPR, type, expr);
      expr = fold_build1 (CONVERT_EXPR,
                          lang_hooks.types.type_for_size
                            (TYPE_PRECISION (intype), 0),
                          expr);
      return fold_convert (type, expr);

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      /* If this is a logical operation, which just returns 0 or 1, we can
         change the type of the expression.  */
      if (TREE_CODE_CLASS (ex_form) == tcc_comparison)
        {
          expr = copy_node (expr);
          TREE_TYPE (expr) = type;
          return expr;
        }

      /* If we are widening the type, put in an explicit conversion.
         Similarly if we are not changing the width.  After this, we know
         we are truncating EXPR.  */
      else if (outprec >= inprec)
        {
          enum tree_code code;

          /* If the precision of the EXPR's type is K bits and the
             destination mode has more bits, and the sign is changing,
             it is not safe to use a NOP_EXPR.  For example, suppose
             that EXPR's type is a 3-bit unsigned integer type, the
             TYPE is a 3-bit signed integer type, and the machine mode
             for the types is 8-bit QImode.  In that case, the
             conversion necessitates an explicit sign-extension.  In
             the signed-to-unsigned case the high-order bits have to
             be cleared.  */
          if (TYPE_UNSIGNED (type) != TYPE_UNSIGNED (TREE_TYPE (expr))
              && !type_has_mode_precision_p (TREE_TYPE (expr)))
            code = CONVERT_EXPR;
          else
            code = NOP_EXPR;

          return maybe_fold_build1_loc (dofold, loc, code, type, expr);
        }

      /* If TYPE is an enumeral type or a type with a precision less
         than the number of bits in its mode, do the conversion to the
         type corresponding to its mode, then do a nop conversion
         to TYPE.  */
      else if (TREE_CODE (type) == ENUMERAL_TYPE
               || maybe_ne (outprec, GET_MODE_PRECISION (TYPE_MODE (type))))
        {
          expr = convert (lang_hooks.types.type_for_mode
                          (TYPE_MODE (type), TYPE_UNSIGNED (type)), expr);
          return maybe_fold_build1_loc (dofold, loc, NOP_EXPR, type, expr);
        }

      /* Here detect when we can distribute the truncation down past some
         arithmetic.  For example, if adding two longs and converting to an
         int, we can equally well convert both to ints and then add.
         For the operations handled here, such truncation distribution
         is always safe.

         It is desirable in these cases:
         1) when truncating down to full-word from a larger size
         2) when truncating takes no work.
         3) when at least one operand of the arithmetic has been extended
         (as by C's default conversions).  In this case we need two
         conversions if we do the arithmetic as already requested, so we
         might as well truncate both and then combine.  Perhaps that way
         we need only one.

         Note that in general we cannot do the arithmetic in a type
         shorter than the desired result of conversion, even if the operands
         are both extended from a shorter type, because they might overflow
         if combined in that type.  The exceptions to this--the times when
         two narrow values can be combined in their narrow type even to
         make a wider result--are handled by "shorten" in build_binary_op.  */

      if (dofold)
        switch (ex_form)
          {
          case RSHIFT_EXPR:
            /* We can pass truncation down through right shifting
               when the shift count is a nonpositive constant.  */
            if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
                && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) <= 0)
              goto trunc1;
            break;

          case LSHIFT_EXPR:
            /* We can pass truncation down through left shifting
               when the shift count is a nonnegative constant and
               the target type is unsigned.  */
            if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
                && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) >= 0
                && TYPE_UNSIGNED (type)
                && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
              {
                /* If the shift count is less than the width of the truncated
                   type, really shift.  */
                if (tree_int_cst_lt (TREE_OPERAND (expr, 1), TYPE_SIZE (type)))
                  /* In this case, shifting is like multiplication.  */
                  goto trunc1;
                else
                  {
                    /* If it is >= that width, the result is zero.
                       Handling this with trunc1 would give the wrong result:
                       (int) ((long long) a << 32) is well defined (as 0)
                       but (int) a << 32 is undefined and would get a
                       warning.  */

                    tree t = build_int_cst (type, 0);

                    /* If the original expression had side-effects, we must
                       preserve them.  */
                    if (TREE_SIDE_EFFECTS (expr))
                      return build2 (COMPOUND_EXPR, type, expr, t);
                    else
                      return t;
                  }
              }
            break;

          case TRUNC_DIV_EXPR:
            {
              tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), NULL_TREE);
              tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), NULL_TREE);

              /* Don't distribute unless the output precision is at least as
                 big as the actual inputs and it has the same signedness.  */
              if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
                  && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
                  /* If the signedness of arg0 and arg1 doesn't match,
                     we can't necessarily find a type to compare them in.  */
                  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
                      == TYPE_UNSIGNED (TREE_TYPE (arg1)))
                  /* Do not change the sign of the division.  */
                  && (TYPE_UNSIGNED (TREE_TYPE (expr))
                      == TYPE_UNSIGNED (TREE_TYPE (arg0)))
                  /* Either require unsigned division or a division by
                     a constant that is not -1.  */
                  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
                      || (TREE_CODE (arg1) == INTEGER_CST
                          && !integer_all_onesp (arg1))))
                {
                  tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
                                        expr, inprec, outprec, dofold);
                  if (tem)
                    return tem;
                }
              break;
            }

          case MAX_EXPR:
          case MIN_EXPR:
          case MULT_EXPR:
            {
              tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
              tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

              /* Don't distribute unless the output precision is at least as
                 big as the actual inputs.  Otherwise, the comparison of the
                 truncated values will be wrong.  */
              if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
                  && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
                  /* If the signedness of arg0 and arg1 doesn't match,
                     we can't necessarily find a type to compare them in.  */
                  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
                      == TYPE_UNSIGNED (TREE_TYPE (arg1))))
                goto trunc1;
              break;
            }

          case PLUS_EXPR:
          case MINUS_EXPR:
          case BIT_AND_EXPR:
          case BIT_IOR_EXPR:
          case BIT_XOR_EXPR:
          trunc1:
            {
              tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
              tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

              /* Do not try to narrow operands of pointer subtraction;
                 that will interfere with other folding.  */
              if (ex_form == MINUS_EXPR
                  && CONVERT_EXPR_P (arg0)
                  && CONVERT_EXPR_P (arg1)
                  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
                  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
                break;

              if (outprec >= BITS_PER_WORD
                  || targetm.truly_noop_truncation (outprec, inprec)
                  || inprec > TYPE_PRECISION (TREE_TYPE (arg0))
                  || inprec > TYPE_PRECISION (TREE_TYPE (arg1)))
                {
                  tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
                                        expr, inprec, outprec, dofold);
                  if (tem)
                    return tem;
                }
            }
            break;

          case NEGATE_EXPR:
            /* Using unsigned arithmetic for signed types may hide overflow
               bugs.  */
            if (!TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (expr, 0)))
                && sanitize_flags_p (SANITIZE_SI_OVERFLOW))
              break;
            /* Fall through.  */
          case BIT_NOT_EXPR:
            /* This is not correct for ABS_EXPR,
               since we must test the sign before truncation.  */
            {
              /* Do the arithmetic in type TYPEX,
                 then convert the result to TYPE.  */
              tree typex = type;

              /* Can't do arithmetic in enumeral types
                 so use an integer type that will hold the values.  */
              if (TREE_CODE (typex) == ENUMERAL_TYPE)
                typex
                  = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
                                                    TYPE_UNSIGNED (typex));

              if (!TYPE_UNSIGNED (typex))
                typex = unsigned_type_for (typex);
              return convert (type,
                              fold_build1 (ex_form, typex,
                                           convert (typex,
                                                    TREE_OPERAND (expr, 0))));
            }

          CASE_CONVERT:
            {
              tree argtype = TREE_TYPE (TREE_OPERAND (expr, 0));
              /* Don't introduce a "can't convert between vector values
                 of different size" error.  */
              if (TREE_CODE (argtype) == VECTOR_TYPE
                  && maybe_ne (GET_MODE_SIZE (TYPE_MODE (argtype)),
                               GET_MODE_SIZE (TYPE_MODE (type))))
                break;
            }
            /* If truncating after truncating, might as well do all at once.
               If truncating after extending, we may get rid of wasted work.  */
            return convert (type, get_unwidened (TREE_OPERAND (expr, 0), type));

          case COND_EXPR:
            /* It is sometimes worthwhile to push the narrowing down through
               the conditional; it never loses.  A COND_EXPR may have a throw
               as one operand, which then has void type.  Just leave void
               operands as they are.  */
            return
              fold_build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
                           VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1)))
                           ? TREE_OPERAND (expr, 1)
                           : convert (type, TREE_OPERAND (expr, 1)),
                           VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 2)))
                           ? TREE_OPERAND (expr, 2)
                           : convert (type, TREE_OPERAND (expr, 2)));

          default:
            break;
          }

      /* When parsing long initializers, we might end up with a lot of casts.
         Shortcut this.  */
      if (TREE_CODE (expr) == INTEGER_CST)
        return fold_convert (type, expr);
      return build1 (CONVERT_EXPR, type, expr);

    case REAL_TYPE:
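      /* With -fsanitize=float-cast-overflow, ubsan_instrument_float_cast
         builds a runtime range check; it is sequenced before the truncation
         via the COMPOUND_EXPR below.  */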
      if (sanitize_flags_p (SANITIZE_FLOAT_CAST)
          && current_function_decl != NULL_TREE)
        {
          expr = save_expr (expr);
          tree check = ubsan_instrument_float_cast (loc, type, expr);
          expr = build1 (FIX_TRUNC_EXPR, type, expr);
          if (check == NULL_TREE)
            return expr;
          return maybe_fold_build2_loc (dofold, loc, COMPOUND_EXPR,
                                        TREE_TYPE (expr), check, expr);
        }
      else
        return build1 (FIX_TRUNC_EXPR, type, expr);

    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      expr = maybe_fold_build1_loc (dofold, loc, REALPART_EXPR,
                                    TREE_TYPE (TREE_TYPE (expr)), expr);
      return convert (type, expr);

    case VECTOR_TYPE:
      if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
        {
          error ("can%'t convert a vector of type %qT"
                 " to type %qT which has different size",
                 TREE_TYPE (expr), type);
          return error_mark_node;
        }
      return build1 (VIEW_CONVERT_EXPR, type, expr);

    default:
      error ("aggregate value used where an integer was expected");
      return convert (type, integer_zero_node);
    }
}

/* Convert EXPR to some integer (or enum) type TYPE.

   EXPR must be pointer, integer, discrete (enum, char, or bool), float,
   fixed-point or vector; in other cases error is called.

   The result of this is always supposed to be a newly created tree node
   not in use in any existing structure.  */

tree
convert_to_integer (tree type, tree expr)
{
  return convert_to_integer_1 (type, expr, true);
}

/* A wrapper around convert_to_integer_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_P.  */

tree
convert_to_integer_maybe_fold (tree type, tree expr, bool dofold)
{
  return convert_to_integer_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
}

/* Convert EXPR to the complex type TYPE in the usual ways.  If FOLD_P is
   true, try to fold the expression.  */

static tree
convert_to_complex_1 (tree type, tree expr, bool fold_p)
{
  location_t loc = EXPR_LOCATION (expr);
  tree subtype = TREE_TYPE (type);

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return build2 (COMPLEX_EXPR, type, convert (subtype, expr),
                     convert (subtype, integer_zero_node));

    case COMPLEX_TYPE:
      {
        tree elt_type = TREE_TYPE (TREE_TYPE (expr));

        if (TYPE_MAIN_VARIANT (elt_type) == TYPE_MAIN_VARIANT (subtype))
          return expr;
        else if (TREE_CODE (expr) == COMPOUND_EXPR)
          {
            tree t = convert_to_complex_1 (type, TREE_OPERAND (expr, 1),
                                           fold_p);
            if (t == TREE_OPERAND (expr, 1))
              return expr;
            return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR,
                               TREE_TYPE (t), TREE_OPERAND (expr, 0), t);
          }
        else if (TREE_CODE (expr) == COMPLEX_EXPR)
          return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
                                        convert (subtype,
                                                 TREE_OPERAND (expr, 0)),
                                        convert (subtype,
                                                 TREE_OPERAND (expr, 1)));
        else
          {
            expr = save_expr (expr);
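            /* EXPR is used twice below, once for the real part and once for
               the imaginary part, so the save_expr above makes sure it is
               evaluated only once.  */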
            tree realp = maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
                                                TREE_TYPE (TREE_TYPE (expr)),
                                                expr);
            tree imagp = maybe_fold_build1_loc (fold_p, loc, IMAGPART_EXPR,
                                                TREE_TYPE (TREE_TYPE (expr)),
                                                expr);
            return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
                                          convert (subtype, realp),
                                          convert (subtype, imagp));
          }
      }

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      error ("pointer value used where a complex was expected");
      return convert_to_complex_1 (type, integer_zero_node, fold_p);

    default:
      error ("aggregate value used where a complex was expected");
      return convert_to_complex_1 (type, integer_zero_node, fold_p);
    }
}

/* A wrapper around convert_to_complex_1 that always folds the
   expression.  */

tree
convert_to_complex (tree type, tree expr)
{
  return convert_to_complex_1 (type, expr, true);
}

/* A wrapper around convert_to_complex_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_P.  */

tree
convert_to_complex_maybe_fold (tree type, tree expr, bool dofold)
{
  return convert_to_complex_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
}

/* Convert EXPR to the vector type TYPE in the usual ways.  */

tree
convert_to_vector (tree type, tree expr)
{
  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case INTEGER_TYPE:
    case VECTOR_TYPE:
      if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
        {
          error ("can%'t convert a value of type %qT"
                 " to vector type %qT which has different size",
                 TREE_TYPE (expr), type);
          return error_mark_node;
        }
      return build1 (VIEW_CONVERT_EXPR, type, expr);

    default:
      error ("can%'t convert value to a vector");
      return error_mark_node;
    }
}

/* Convert EXPR to some fixed-point type TYPE.

   EXPR must be fixed-point, float, integer, or enumeral;
   in other cases error is called.  */

tree
convert_to_fixed (tree type, tree expr)
{
  if (integer_zerop (expr))
    {
      tree fixed_zero_node = build_fixed (type, FCONST0 (TYPE_MODE (type)));
      return fixed_zero_node;
    }
  else if (integer_onep (expr) && ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)))
    {
      tree fixed_one_node = build_fixed (type, FCONST1 (TYPE_MODE (type)));
      return fixed_one_node;
    }

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case FIXED_POINT_TYPE:
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      return convert (type,
                      fold_build1 (REALPART_EXPR,
                                   TREE_TYPE (TREE_TYPE (expr)), expr));

    default:
      error ("aggregate value used where a fixed-point was expected");
      return error_mark_node;
    }
}