/* Imported from official-gcc.git, gcc/builtins.c
   (svn merge -r215707:216846 svn+ssh://gcc.gnu.org/svn/gcc/trunk;
   blob 98c62a2bfc9e861e10a9a9ba78d993f4268fbfb1).  */
/* Expand builtin functions.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "ubsan.h"
#include "cilk.h"
70 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
72 struct target_builtins default_target_builtins;
73 #if SWITCHABLE_TARGET
74 struct target_builtins *this_target_builtins = &default_target_builtins;
75 #endif
77 /* Define the names of the builtin function types and codes. */
78 const char *const built_in_class_names[BUILT_IN_LAST]
79 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
81 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
82 const char * built_in_names[(int) END_BUILTINS] =
84 #include "builtins.def"
86 #undef DEF_BUILTIN
88 /* Setup an array of _DECL trees, make sure each element is
89 initialized to NULL_TREE. */
90 builtin_info_type builtin_info;
92 /* Non-zero if __builtin_constant_p should be folded right away. */
93 bool force_folding_builtin_constant_p;
95 static rtx c_readstr (const char *, machine_mode);
96 static int target_char_cast (tree, char *);
97 static rtx get_memory_rtx (tree, tree);
98 static int apply_args_size (void);
99 static int apply_result_size (void);
100 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
101 static rtx result_vector (int, rtx);
102 #endif
103 static void expand_builtin_update_setjmp_buf (rtx);
104 static void expand_builtin_prefetch (tree);
105 static rtx expand_builtin_apply_args (void);
106 static rtx expand_builtin_apply_args_1 (void);
107 static rtx expand_builtin_apply (rtx, rtx, rtx);
108 static void expand_builtin_return (rtx);
109 static enum type_class type_to_class (tree);
110 static rtx expand_builtin_classify_type (tree);
111 static void expand_errno_check (tree, rtx);
112 static rtx expand_builtin_mathfn (tree, rtx, rtx);
113 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
114 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
115 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
116 static rtx expand_builtin_interclass_mathfn (tree, rtx);
117 static rtx expand_builtin_sincos (tree);
118 static rtx expand_builtin_cexpi (tree, rtx);
119 static rtx expand_builtin_int_roundingfn (tree, rtx);
120 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
121 static rtx expand_builtin_next_arg (void);
122 static rtx expand_builtin_va_start (tree);
123 static rtx expand_builtin_va_end (tree);
124 static rtx expand_builtin_va_copy (tree);
125 static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
126 static rtx expand_builtin_strcmp (tree, rtx);
127 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
128 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
129 static rtx expand_builtin_memcpy (tree, rtx);
130 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
131 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
132 machine_mode, int);
133 static rtx expand_builtin_strcpy (tree, rtx);
134 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
135 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
136 static rtx expand_builtin_strncpy (tree, rtx);
137 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
138 static rtx expand_builtin_memset (tree, rtx, machine_mode);
139 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
140 static rtx expand_builtin_bzero (tree);
141 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
142 static rtx expand_builtin_alloca (tree, bool);
143 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
144 static rtx expand_builtin_frame_address (tree, tree);
145 static tree stabilize_va_list_loc (location_t, tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_classify_type (tree);
149 static tree fold_builtin_strlen (location_t, tree, tree);
150 static tree fold_builtin_inf (location_t, tree, int);
151 static tree fold_builtin_nan (tree, tree, int);
152 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
153 static bool validate_arg (const_tree, enum tree_code code);
154 static bool integer_valued_real_p (tree);
155 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
156 static rtx expand_builtin_fabs (tree, rtx, rtx);
157 static rtx expand_builtin_signbit (tree, rtx);
158 static tree fold_builtin_sqrt (location_t, tree, tree);
159 static tree fold_builtin_cbrt (location_t, tree, tree);
160 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
161 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
162 static tree fold_builtin_cos (location_t, tree, tree, tree);
163 static tree fold_builtin_cosh (location_t, tree, tree, tree);
164 static tree fold_builtin_tan (tree, tree);
165 static tree fold_builtin_trunc (location_t, tree, tree);
166 static tree fold_builtin_floor (location_t, tree, tree);
167 static tree fold_builtin_ceil (location_t, tree, tree);
168 static tree fold_builtin_round (location_t, tree, tree);
169 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
170 static tree fold_builtin_bitop (tree, tree);
171 static tree fold_builtin_strchr (location_t, tree, tree, tree);
172 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
173 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
174 static tree fold_builtin_strcmp (location_t, tree, tree);
175 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
176 static tree fold_builtin_signbit (location_t, tree, tree);
177 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
178 static tree fold_builtin_isascii (location_t, tree);
179 static tree fold_builtin_toascii (location_t, tree);
180 static tree fold_builtin_isdigit (location_t, tree);
181 static tree fold_builtin_fabs (location_t, tree, tree);
182 static tree fold_builtin_abs (location_t, tree, tree);
183 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
184 enum tree_code);
185 static tree fold_builtin_0 (location_t, tree, bool);
186 static tree fold_builtin_1 (location_t, tree, tree, bool);
187 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
188 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
189 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
190 static tree fold_builtin_varargs (location_t, tree, tree, bool);
192 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
193 static tree fold_builtin_strstr (location_t, tree, tree, tree);
194 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
195 static tree fold_builtin_strncat (location_t, tree, tree, tree);
196 static tree fold_builtin_strspn (location_t, tree, tree);
197 static tree fold_builtin_strcspn (location_t, tree, tree);
199 static rtx expand_builtin_object_size (tree);
200 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
201 enum built_in_function);
202 static void maybe_emit_chk_warning (tree, enum built_in_function);
203 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
204 static void maybe_emit_free_warning (tree);
205 static tree fold_builtin_object_size (tree, tree);
206 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
207 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
208 enum built_in_function);
210 static unsigned HOST_WIDE_INT target_newline;
211 unsigned HOST_WIDE_INT target_percent;
212 static unsigned HOST_WIDE_INT target_c;
213 static unsigned HOST_WIDE_INT target_s;
214 static char target_percent_c[3];
215 char target_percent_s[3];
216 static char target_percent_s_newline[4];
217 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
218 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
219 static tree do_mpfr_arg2 (tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_arg3 (tree, tree, tree, tree,
222 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
223 static tree do_mpfr_sincos (tree, tree, tree);
224 static tree do_mpfr_bessel_n (tree, tree, tree,
225 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
226 const REAL_VALUE_TYPE *, bool);
227 static tree do_mpfr_remquo (tree, tree, tree);
228 static tree do_mpfr_lgamma_r (tree, tree, tree);
229 static void expand_builtin_sync_synchronize (void);
231 /* Return true if NAME starts with __builtin_ or __sync_. */
233 static bool
234 is_builtin_name (const char *name)
236 if (strncmp (name, "__builtin_", 10) == 0)
237 return true;
238 if (strncmp (name, "__sync_", 7) == 0)
239 return true;
240 if (strncmp (name, "__atomic_", 9) == 0)
241 return true;
242 if (flag_cilkplus
243 && (!strcmp (name, "__cilkrts_detach")
244 || !strcmp (name, "__cilkrts_pop_frame")))
245 return true;
246 return false;
250 /* Return true if DECL is a function symbol representing a built-in. */
252 bool
253 is_builtin_fn (tree decl)
255 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
258 /* Return true if NODE should be considered for inline expansion regardless
259 of the optimization level. This means whenever a function is invoked with
260 its "internal" name, which normally contains the prefix "__builtin". */
262 static bool
263 called_as_built_in (tree node)
265 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
266 we want the name used to call the function, not the name it
267 will have. */
268 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
269 return is_builtin_name (name);
272 /* Compute values M and N such that M divides (address of EXP - N) and such
273 that N < M. If these numbers can be determined, store M in alignp and N in
274 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
275 *alignp and any bit-offset to *bitposp.
277 Note that the address (and thus the alignment) computed here is based
278 on the address to which a symbol resolves, whereas DECL_ALIGN is based
279 on the address at which an object is actually located. These two
280 addresses are not always the same. For example, on ARM targets,
281 the address &foo of a Thumb function foo() has the lowest bit set,
282 whereas foo() itself starts on an even address.
284 If ADDR_P is true we are taking the address of the memory reference EXP
285 and thus cannot rely on the access taking place. */
287 static bool
288 get_object_alignment_2 (tree exp, unsigned int *alignp,
289 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
291 HOST_WIDE_INT bitsize, bitpos;
292 tree offset;
293 machine_mode mode;
294 int unsignedp, volatilep;
295 unsigned int align = BITS_PER_UNIT;
296 bool known_alignment = false;
298 /* Get the innermost object and the constant (bitpos) and possibly
299 variable (offset) offset of the access. */
300 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
301 &mode, &unsignedp, &volatilep, true);
303 /* Extract alignment information from the innermost object and
304 possibly adjust bitpos and offset. */
305 if (TREE_CODE (exp) == FUNCTION_DECL)
307 /* Function addresses can encode extra information besides their
308 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
309 allows the low bit to be used as a virtual bit, we know
310 that the address itself must be at least 2-byte aligned. */
311 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
312 align = 2 * BITS_PER_UNIT;
314 else if (TREE_CODE (exp) == LABEL_DECL)
316 else if (TREE_CODE (exp) == CONST_DECL)
318 /* The alignment of a CONST_DECL is determined by its initializer. */
319 exp = DECL_INITIAL (exp);
320 align = TYPE_ALIGN (TREE_TYPE (exp));
321 #ifdef CONSTANT_ALIGNMENT
322 if (CONSTANT_CLASS_P (exp))
323 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
324 #endif
325 known_alignment = true;
327 else if (DECL_P (exp))
329 align = DECL_ALIGN (exp);
330 known_alignment = true;
332 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
334 align = TYPE_ALIGN (TREE_TYPE (exp));
336 else if (TREE_CODE (exp) == INDIRECT_REF
337 || TREE_CODE (exp) == MEM_REF
338 || TREE_CODE (exp) == TARGET_MEM_REF)
340 tree addr = TREE_OPERAND (exp, 0);
341 unsigned ptr_align;
342 unsigned HOST_WIDE_INT ptr_bitpos;
344 if (TREE_CODE (addr) == BIT_AND_EXPR
345 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
347 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
348 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
349 align *= BITS_PER_UNIT;
350 addr = TREE_OPERAND (addr, 0);
353 known_alignment
354 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
355 align = MAX (ptr_align, align);
357 /* The alignment of the pointer operand in a TARGET_MEM_REF
358 has to take the variable offset parts into account. */
359 if (TREE_CODE (exp) == TARGET_MEM_REF)
361 if (TMR_INDEX (exp))
363 unsigned HOST_WIDE_INT step = 1;
364 if (TMR_STEP (exp))
365 step = TREE_INT_CST_LOW (TMR_STEP (exp));
366 align = MIN (align, (step & -step) * BITS_PER_UNIT);
368 if (TMR_INDEX2 (exp))
369 align = BITS_PER_UNIT;
370 known_alignment = false;
373 /* When EXP is an actual memory reference then we can use
374 TYPE_ALIGN of a pointer indirection to derive alignment.
375 Do so only if get_pointer_alignment_1 did not reveal absolute
376 alignment knowledge and if using that alignment would
377 improve the situation. */
378 if (!addr_p && !known_alignment
379 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
380 align = TYPE_ALIGN (TREE_TYPE (exp));
381 else
383 /* Else adjust bitpos accordingly. */
384 bitpos += ptr_bitpos;
385 if (TREE_CODE (exp) == MEM_REF
386 || TREE_CODE (exp) == TARGET_MEM_REF)
387 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
390 else if (TREE_CODE (exp) == STRING_CST)
392 /* STRING_CST are the only constant objects we allow to be not
393 wrapped inside a CONST_DECL. */
394 align = TYPE_ALIGN (TREE_TYPE (exp));
395 #ifdef CONSTANT_ALIGNMENT
396 if (CONSTANT_CLASS_P (exp))
397 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
398 #endif
399 known_alignment = true;
402 /* If there is a non-constant offset part extract the maximum
403 alignment that can prevail. */
404 if (offset)
406 unsigned int trailing_zeros = tree_ctz (offset);
407 if (trailing_zeros < HOST_BITS_PER_INT)
409 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
410 if (inner)
411 align = MIN (align, inner);
415 *alignp = align;
416 *bitposp = bitpos & (*alignp - 1);
417 return known_alignment;
420 /* For a memory reference expression EXP compute values M and N such that M
421 divides (&EXP - N) and such that N < M. If these numbers can be determined,
422 store M in alignp and N in *BITPOSP and return true. Otherwise return false
423 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
425 bool
426 get_object_alignment_1 (tree exp, unsigned int *alignp,
427 unsigned HOST_WIDE_INT *bitposp)
429 return get_object_alignment_2 (exp, alignp, bitposp, false);
432 /* Return the alignment in bits of EXP, an object. */
434 unsigned int
435 get_object_alignment (tree exp)
437 unsigned HOST_WIDE_INT bitpos = 0;
438 unsigned int align;
440 get_object_alignment_1 (exp, &align, &bitpos);
442 /* align and bitpos now specify known low bits of the pointer.
443 ptr & (align - 1) == bitpos. */
445 if (bitpos != 0)
446 align = (bitpos & -bitpos);
447 return align;
450 /* For a pointer valued expression EXP compute values M and N such that M
451 divides (EXP - N) and such that N < M. If these numbers can be determined,
452 store M in alignp and N in *BITPOSP and return true. Return false if
453 the results are just a conservative approximation.
455 If EXP is not a pointer, false is returned too. */
457 bool
458 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
459 unsigned HOST_WIDE_INT *bitposp)
461 STRIP_NOPS (exp);
463 if (TREE_CODE (exp) == ADDR_EXPR)
464 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
465 alignp, bitposp, true);
466 else if (TREE_CODE (exp) == SSA_NAME
467 && POINTER_TYPE_P (TREE_TYPE (exp)))
469 unsigned int ptr_align, ptr_misalign;
470 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
472 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
474 *bitposp = ptr_misalign * BITS_PER_UNIT;
475 *alignp = ptr_align * BITS_PER_UNIT;
476 /* We cannot really tell whether this result is an approximation. */
477 return true;
479 else
481 *bitposp = 0;
482 *alignp = BITS_PER_UNIT;
483 return false;
486 else if (TREE_CODE (exp) == INTEGER_CST)
488 *alignp = BIGGEST_ALIGNMENT;
489 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
490 & (BIGGEST_ALIGNMENT - 1));
491 return true;
494 *bitposp = 0;
495 *alignp = BITS_PER_UNIT;
496 return false;
499 /* Return the alignment in bits of EXP, a pointer valued expression.
500 The alignment returned is, by default, the alignment of the thing that
501 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
503 Otherwise, look at the expression to see if we can do better, i.e., if the
504 expression is actually pointing at an object whose alignment is tighter. */
506 unsigned int
507 get_pointer_alignment (tree exp)
509 unsigned HOST_WIDE_INT bitpos = 0;
510 unsigned int align;
512 get_pointer_alignment_1 (exp, &align, &bitpos);
514 /* align and bitpos now specify known low bits of the pointer.
515 ptr & (align - 1) == bitpos. */
517 if (bitpos != 0)
518 align = (bitpos & -bitpos);
520 return align;
523 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
524 way, because it could contain a zero byte in the middle.
525 TREE_STRING_LENGTH is the size of the character array, not the string.
527 ONLY_VALUE should be nonzero if the result is not going to be emitted
528 into the instruction stream and zero if it is going to be expanded.
529 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
530 is returned, otherwise NULL, since
531 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
532 evaluate the side-effects.
534 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
535 accesses. Note that this implies the result is not going to be emitted
536 into the instruction stream.
538 The value returned is of type `ssizetype'.
540 Unfortunately, string_constant can't access the values of const char
541 arrays with initializers, so neither can we do so here. */
543 tree
544 c_strlen (tree src, int only_value)
546 tree offset_node;
547 HOST_WIDE_INT offset;
548 int max;
549 const char *ptr;
550 location_t loc;
552 STRIP_NOPS (src);
553 if (TREE_CODE (src) == COND_EXPR
554 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
556 tree len1, len2;
558 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
559 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
560 if (tree_int_cst_equal (len1, len2))
561 return len1;
564 if (TREE_CODE (src) == COMPOUND_EXPR
565 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
566 return c_strlen (TREE_OPERAND (src, 1), only_value);
568 loc = EXPR_LOC_OR_LOC (src, input_location);
570 src = string_constant (src, &offset_node);
571 if (src == 0)
572 return NULL_TREE;
574 max = TREE_STRING_LENGTH (src) - 1;
575 ptr = TREE_STRING_POINTER (src);
577 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
579 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
580 compute the offset to the following null if we don't know where to
581 start searching for it. */
582 int i;
584 for (i = 0; i < max; i++)
585 if (ptr[i] == 0)
586 return NULL_TREE;
588 /* We don't know the starting offset, but we do know that the string
589 has no internal zero bytes. We can assume that the offset falls
590 within the bounds of the string; otherwise, the programmer deserves
591 what he gets. Subtract the offset from the length of the string,
592 and return that. This would perhaps not be valid if we were dealing
593 with named arrays in addition to literal string constants. */
595 return size_diffop_loc (loc, size_int (max), offset_node);
598 /* We have a known offset into the string. Start searching there for
599 a null character if we can represent it as a single HOST_WIDE_INT. */
600 if (offset_node == 0)
601 offset = 0;
602 else if (! tree_fits_shwi_p (offset_node))
603 offset = -1;
604 else
605 offset = tree_to_shwi (offset_node);
607 /* If the offset is known to be out of bounds, warn, and call strlen at
608 runtime. */
609 if (offset < 0 || offset > max)
611 /* Suppress multiple warnings for propagated constant strings. */
612 if (only_value != 2
613 && !TREE_NO_WARNING (src))
615 warning_at (loc, 0, "offset outside bounds of constant string");
616 TREE_NO_WARNING (src) = 1;
618 return NULL_TREE;
621 /* Use strlen to search for the first zero byte. Since any strings
622 constructed with build_string will have nulls appended, we win even
623 if we get handed something like (char[4])"abcd".
625 Since OFFSET is our starting index into the string, no further
626 calculation is needed. */
627 return ssize_int (strlen (ptr + offset));
630 /* Return a char pointer for a C string if it is a string constant
631 or sum of string constant and integer constant. */
633 const char *
634 c_getstr (tree src)
636 tree offset_node;
638 src = string_constant (src, &offset_node);
639 if (src == 0)
640 return 0;
642 if (offset_node == 0)
643 return TREE_STRING_POINTER (src);
644 else if (!tree_fits_uhwi_p (offset_node)
645 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
646 return 0;
648 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
651 /* Return a constant integer corresponding to target reading
652 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
654 static rtx
655 c_readstr (const char *str, machine_mode mode)
657 HOST_WIDE_INT ch;
658 unsigned int i, j;
659 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
661 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
662 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
663 / HOST_BITS_PER_WIDE_INT;
665 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
666 for (i = 0; i < len; i++)
667 tmp[i] = 0;
669 ch = 1;
670 for (i = 0; i < GET_MODE_SIZE (mode); i++)
672 j = i;
673 if (WORDS_BIG_ENDIAN)
674 j = GET_MODE_SIZE (mode) - i - 1;
675 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
676 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
677 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
678 j *= BITS_PER_UNIT;
680 if (ch)
681 ch = (unsigned char) str[i];
682 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
685 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
686 return immed_wide_int_const (c, mode);
689 /* Cast a target constant CST to target CHAR and if that value fits into
690 host char type, return zero and put that value into variable pointed to by
691 P. */
693 static int
694 target_char_cast (tree cst, char *p)
696 unsigned HOST_WIDE_INT val, hostval;
698 if (TREE_CODE (cst) != INTEGER_CST
699 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
700 return 1;
702 /* Do not care if it fits or not right here. */
703 val = TREE_INT_CST_LOW (cst);
705 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
706 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
708 hostval = val;
709 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
710 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
712 if (val != hostval)
713 return 1;
715 *p = hostval;
716 return 0;
719 /* Similar to save_expr, but assumes that arbitrary code is not executed
720 in between the multiple evaluations. In particular, we assume that a
721 non-addressable local variable will not be modified. */
723 static tree
724 builtin_save_expr (tree exp)
726 if (TREE_CODE (exp) == SSA_NAME
727 || (TREE_ADDRESSABLE (exp) == 0
728 && (TREE_CODE (exp) == PARM_DECL
729 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
730 return exp;
732 return save_expr (exp);
735 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
736 times to get the address of either a higher stack frame, or a return
737 address located within it (depending on FNDECL_CODE). */
739 static rtx
740 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
742 int i;
744 #ifdef INITIAL_FRAME_ADDRESS_RTX
745 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
746 #else
747 rtx tem;
749 /* For a zero count with __builtin_return_address, we don't care what
750 frame address we return, because target-specific definitions will
751 override us. Therefore frame pointer elimination is OK, and using
752 the soft frame pointer is OK.
754 For a nonzero count, or a zero count with __builtin_frame_address,
755 we require a stable offset from the current frame pointer to the
756 previous one, so we must use the hard frame pointer, and
757 we must disable frame pointer elimination. */
758 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
759 tem = frame_pointer_rtx;
760 else
762 tem = hard_frame_pointer_rtx;
764 /* Tell reload not to eliminate the frame pointer. */
765 crtl->accesses_prior_frames = 1;
767 #endif
769 /* Some machines need special handling before we can access
770 arbitrary frames. For example, on the SPARC, we must first flush
771 all register windows to the stack. */
772 #ifdef SETUP_FRAME_ADDRESSES
773 if (count > 0)
774 SETUP_FRAME_ADDRESSES ();
775 #endif
777 /* On the SPARC, the return address is not in the frame, it is in a
778 register. There is no way to access it off of the current frame
779 pointer, but it can be accessed off the previous frame pointer by
780 reading the value from the register window save area. */
781 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
782 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
783 count--;
784 #endif
786 /* Scan back COUNT frames to the specified frame. */
787 for (i = 0; i < count; i++)
789 /* Assume the dynamic chain pointer is in the word that the
790 frame address points to, unless otherwise specified. */
791 #ifdef DYNAMIC_CHAIN_ADDRESS
792 tem = DYNAMIC_CHAIN_ADDRESS (tem);
793 #endif
794 tem = memory_address (Pmode, tem);
795 tem = gen_frame_mem (Pmode, tem);
796 tem = copy_to_reg (tem);
799 /* For __builtin_frame_address, return what we've got. But, on
800 the SPARC for example, we may have to add a bias. */
801 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
802 #ifdef FRAME_ADDR_RTX
803 return FRAME_ADDR_RTX (tem);
804 #else
805 return tem;
806 #endif
808 /* For __builtin_return_address, get the return address from that frame. */
809 #ifdef RETURN_ADDR_RTX
810 tem = RETURN_ADDR_RTX (count, tem);
811 #else
812 tem = memory_address (Pmode,
813 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
814 tem = gen_frame_mem (Pmode, tem);
815 #endif
816 return tem;
819 /* Alias set used for setjmp buffer. */
820 static alias_set_type setjmp_alias_set = -1;
822 /* Construct the leading half of a __builtin_setjmp call. Control will
823 return to RECEIVER_LABEL. This is also called directly by the SJLJ
824 exception handling code. */
826 void
827 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
829 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
830 rtx stack_save;
831 rtx mem;
833 if (setjmp_alias_set == -1)
834 setjmp_alias_set = new_alias_set ();
836 buf_addr = convert_memory_address (Pmode, buf_addr);
838 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
840 /* We store the frame pointer and the address of receiver_label in
841 the buffer and use the rest of it for the stack save area, which
842 is machine-dependent. */
844 mem = gen_rtx_MEM (Pmode, buf_addr);
845 set_mem_alias_set (mem, setjmp_alias_set);
846 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
848 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
849 GET_MODE_SIZE (Pmode))),
850 set_mem_alias_set (mem, setjmp_alias_set);
852 emit_move_insn (validize_mem (mem),
853 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
855 stack_save = gen_rtx_MEM (sa_mode,
856 plus_constant (Pmode, buf_addr,
857 2 * GET_MODE_SIZE (Pmode)));
858 set_mem_alias_set (stack_save, setjmp_alias_set);
859 emit_stack_save (SAVE_NONLOCAL, &stack_save);
861 /* If there is further processing to do, do it. */
862 #ifdef HAVE_builtin_setjmp_setup
863 if (HAVE_builtin_setjmp_setup)
864 emit_insn (gen_builtin_setjmp_setup (buf_addr));
865 #endif
867 /* We have a nonlocal label. */
868 cfun->has_nonlocal_label = 1;
871 /* Construct the trailing part of a __builtin_setjmp call. This is
872 also called directly by the SJLJ exception handling code.
873 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
875 void
876 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
878 rtx chain;
880 /* Mark the FP as used when we get here, so we have to make sure it's
881 marked as used by this function. */
882 emit_use (hard_frame_pointer_rtx);
884 /* Mark the static chain as clobbered here so life information
885 doesn't get messed up for it. */
886 chain = targetm.calls.static_chain (current_function_decl, true);
887 if (chain && REG_P (chain))
888 emit_clobber (chain);
890 /* Now put in the code to restore the frame pointer, and argument
891 pointer, if needed. */
892 #ifdef HAVE_nonlocal_goto
893 if (! HAVE_nonlocal_goto)
894 #endif
896 /* First adjust our frame pointer to its actual value. It was
897 previously set to the start of the virtual area corresponding to
898 the stacked variables when we branched here and now needs to be
899 adjusted to the actual hardware fp value.
901 Assignments to virtual registers are converted by
902 instantiate_virtual_regs into the corresponding assignment
903 to the underlying register (fp in this case) that makes
904 the original assignment true.
905 So the following insn will actually be decrementing fp by
906 STARTING_FRAME_OFFSET. */
907 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx)
909 /* Restoring the frame pointer also modifies the hard frame pointer.
910 Mark it used (so that the previous assignment remains live once
911 the frame pointer is eliminated) and clobbered (to represent the
912 implicit update from the assignment). */
913 emit_use (hard_frame_pointer_rtx);
914 emit_clobber (hard_frame_pointer_rtx);
917 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
918 if (fixed_regs[ARG_POINTER_REGNUM])
920 #ifdef ELIMINABLE_REGS
921 /* If the argument pointer can be eliminated in favor of the
922 frame pointer, we don't need to restore it. We assume here
923 that if such an elimination is present, it can always be used.
924 This is the case on all known machines; if we don't make this
925 assumption, we do unnecessary saving on many machines. */
926 size_t i;
927 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
929 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
930 if (elim_regs[i].from == ARG_POINTER_REGNUM
931 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
932 break;
934 if (i == ARRAY_SIZE (elim_regs))
935 #endif
937 /* Now restore our arg pointer from the address at which it
938 was saved in our stack frame. */
939 emit_move_insn (crtl->args.internal_arg_pointer,
940 copy_to_reg (get_arg_pointer_save_area ()));
943 #endif
/* Give the target a chance to emit receiver-specific code: prefer the
   builtin_setjmp_receiver pattern when a receiver label is present,
   otherwise fall back to the nonlocal_goto_receiver pattern.  */
945 #ifdef HAVE_builtin_setjmp_receiver
946 if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
947 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
948 else
949 #endif
950 #ifdef HAVE_nonlocal_goto_receiver
951 if (HAVE_nonlocal_goto_receiver)
952 emit_insn (gen_nonlocal_goto_receiver ());
953 else
954 #endif
955 { /* Nothing */ }
957 /* We must not allow the code we just generated to be reordered by
958 scheduling. Specifically, the update of the frame pointer must
959 happen immediately, not later. */
960 emit_insn (gen_blockage ());
963 /* __builtin_longjmp is passed a pointer to an array of five words (not
964 all will be used on all machines). It operates similarly to the C
965 library function of the same name, but is more efficient. Much of
966 the code below is copied from the handling of non-local gotos. */
968 static void
969 expand_builtin_longjmp (rtx buf_addr, rtx value)
971 rtx fp, lab, stack;
972 rtx_insn *insn, *last;
973 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
975 /* DRAP is needed for stack realign if longjmp is expanded to current
976 function */
977 if (SUPPORTS_STACK_ALIGNMENT)
978 crtl->need_drap = true;
980 if (setjmp_alias_set == -1)
981 setjmp_alias_set = new_alias_set ();
983 buf_addr = convert_memory_address (Pmode, buf_addr);
985 buf_addr = force_reg (Pmode, buf_addr);
987 /* We require that the user must pass a second argument of 1, because
988 that is what builtin_setjmp will return. */
989 gcc_assert (value == const1_rtx);
991 last = get_last_insn ();
992 #ifdef HAVE_builtin_longjmp
993 if (HAVE_builtin_longjmp)
994 emit_insn (gen_builtin_longjmp (buf_addr));
995 else
996 #endif
/* Generic fallback: reload FP, the resume label and SP from the buffer
   and jump indirectly.  Buffer layout matches __builtin_setjmp:
   word 0 = frame pointer, word 1 = label, word 2 = saved stack.  */
998 fp = gen_rtx_MEM (Pmode, buf_addr);
999 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1000 GET_MODE_SIZE (Pmode)));
1002 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1003 2 * GET_MODE_SIZE (Pmode)));
1004 set_mem_alias_set (fp, setjmp_alias_set);
1005 set_mem_alias_set (lab, setjmp_alias_set);
1006 set_mem_alias_set (stack, setjmp_alias_set);
1008 /* Pick up FP, label, and SP from the block and jump. This code is
1009 from expand_goto in stmt.c; see there for detailed comments. */
1010 #ifdef HAVE_nonlocal_goto
1011 if (HAVE_nonlocal_goto)
1012 /* We have to pass a value to the nonlocal_goto pattern that will
1013 get copied into the static_chain pointer, but it does not matter
1014 what that value is, because builtin_setjmp does not use it. */
1015 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1016 else
1017 #endif
1019 lab = copy_to_reg (lab);
1021 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1022 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1024 emit_move_insn (hard_frame_pointer_rtx, fp);
1025 emit_stack_restore (SAVE_NONLOCAL, stack);
1027 emit_use (hard_frame_pointer_rtx);
1028 emit_use (stack_pointer_rtx);
1029 emit_indirect_jump (lab);
1033 /* Search backwards and mark the jump insn as a non-local goto.
1034 Note that this precludes the use of __builtin_longjmp to a
1035 __builtin_setjmp target in the same function. However, we've
1036 already cautioned the user that these functions are for
1037 internal exception handling use only. */
1038 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1040 gcc_assert (insn != last);
1042 if (JUMP_P (insn))
1044 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1045 break;
1047 else if (CALL_P (insn))
1048 break;
1052 static inline bool
1053 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1055 return (iter->i < iter->n);
1058 /* This function validates the types of a function call argument list
1059 against a specified list of tree_codes. If the last specifier is a 0,
1060 that represents an ellipses, otherwise the last specifier must be a
1061 VOID_TYPE. */
1063 static bool
1064 validate_arglist (const_tree callexpr, ...)
1066 enum tree_code code;
1067 bool res = 0;
1068 va_list ap;
1069 const_call_expr_arg_iterator iter;
1070 const_tree arg;
1072 va_start (ap, callexpr);
1073 init_const_call_expr_arg_iterator (callexpr, &iter);
1077 code = (enum tree_code) va_arg (ap, int);
1078 switch (code)
1080 case 0:
1081 /* This signifies an ellipses, any further arguments are all ok. */
1082 res = true;
1083 goto end;
1084 case VOID_TYPE:
1085 /* This signifies an endlink, if no arguments remain, return
1086 true, otherwise return false. */
1087 res = !more_const_call_expr_args_p (&iter);
1088 goto end;
1089 default:
1090 /* If no parameters remain or the parameter's code does not
1091 match the specified code, return false. Otherwise continue
1092 checking any remaining arguments. */
1093 arg = next_const_call_expr_arg (&iter);
1094 if (!validate_arg (arg, code))
1095 goto end;
1096 break;
1099 while (1);
1101 /* We need gotos here since we can only have one VA_CLOSE in a
1102 function. */
1103 end: ;
1104 va_end (ap);
1106 return res;
1109 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1110 and the address of the save area. */
1112 static rtx
1113 expand_builtin_nonlocal_goto (tree exp)
1115 tree t_label, t_save_area;
1116 rtx r_label, r_save_area, r_fp, r_sp;
1117 rtx_insn *insn;
1119 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1120 return NULL_RTX;
1122 t_label = CALL_EXPR_ARG (exp, 0);
1123 t_save_area = CALL_EXPR_ARG (exp, 1);
1125 r_label = expand_normal (t_label);
1126 r_label = convert_memory_address (Pmode, r_label);
1127 r_save_area = expand_normal (t_save_area);
1128 r_save_area = convert_memory_address (Pmode, r_save_area);
1129 /* Copy the address of the save location to a register just in case it was
1130 based on the frame pointer. */
1131 r_save_area = copy_to_reg (r_save_area);
/* Save-area layout: word 0 holds the frame pointer, word 1 the saved
   stack pointer (in the target's SAVE_NONLOCAL save-area mode).  */
1132 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1133 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1134 plus_constant (Pmode, r_save_area,
1135 GET_MODE_SIZE (Pmode)));
1137 crtl->has_nonlocal_goto = 1;
1139 #ifdef HAVE_nonlocal_goto
1140 /* ??? We no longer need to pass the static chain value, afaik. */
1141 if (HAVE_nonlocal_goto)
1142 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1143 else
1144 #endif
1146 r_label = copy_to_reg (r_label);
1148 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1149 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1151 /* Restore frame pointer for containing function. */
1152 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1153 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1155 /* USE of hard_frame_pointer_rtx added for consistency;
1156 not clear if really needed. */
1157 emit_use (hard_frame_pointer_rtx);
1158 emit_use (stack_pointer_rtx);
1160 /* If the architecture is using a GP register, we must
1161 conservatively assume that the target function makes use of it.
1162 The prologue of functions with nonlocal gotos must therefore
1163 initialize the GP register to the appropriate value, and we
1164 must then make sure that this value is live at the point
1165 of the jump. (Note that this doesn't necessarily apply
1166 to targets with a nonlocal_goto pattern; they are free
1167 to implement it in their own way. Note also that this is
1168 a no-op if the GP register is a global invariant.) */
1169 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1170 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1171 emit_use (pic_offset_table_rtx);
1173 emit_indirect_jump (r_label);
1176 /* Search backwards to the jump insn and mark it as a
1177 non-local goto. */
1178 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1180 if (JUMP_P (insn))
1182 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1183 break;
1185 else if (CALL_P (insn))
1186 break;
1189 return const0_rtx;
1192 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1193 (not all will be used on all machines) that was passed to __builtin_setjmp.
1194 It updates the stack pointer in that block to correspond to the current
1195 stack pointer. */
1197 static void
1198 expand_builtin_update_setjmp_buf (rtx buf_addr)
1200 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1201 rtx stack_save
1202 = gen_rtx_MEM (sa_mode,
1203 memory_address
1204 (sa_mode,
1205 plus_constant (Pmode, buf_addr,
1206 2 * GET_MODE_SIZE (Pmode))));
1208 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1211 /* Expand a call to __builtin_prefetch. For a target that does not support
1212 data prefetch, evaluate the memory address argument in case it has side
1213 effects. */
1215 static void
1216 expand_builtin_prefetch (tree exp)
1218 tree arg0, arg1, arg2;
1219 int nargs;
1220 rtx op0, op1, op2;
1222 if (!validate_arglist (exp, POINTER_TYPE, 0))
1223 return;
1225 arg0 = CALL_EXPR_ARG (exp, 0);
1227 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1228 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1229 locality). */
1230 nargs = call_expr_nargs (exp);
1231 if (nargs > 1)
1232 arg1 = CALL_EXPR_ARG (exp, 1);
1233 else
1234 arg1 = integer_zero_node;
1235 if (nargs > 2)
1236 arg2 = CALL_EXPR_ARG (exp, 2);
1237 else
1238 arg2 = integer_three_node;
1240 /* Argument 0 is an address. */
1241 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1243 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1244 if (TREE_CODE (arg1) != INTEGER_CST)
1246 error ("second argument to %<__builtin_prefetch%> must be a constant");
1247 arg1 = integer_zero_node;
1249 op1 = expand_normal (arg1);
1250 /* Argument 1 must be either zero or one. */
1251 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1253 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1254 " using zero");
1255 op1 = const0_rtx;
1258 /* Argument 2 (locality) must be a compile-time constant int. */
1259 if (TREE_CODE (arg2) != INTEGER_CST)
1261 error ("third argument to %<__builtin_prefetch%> must be a constant");
1262 arg2 = integer_zero_node;
1264 op2 = expand_normal (arg2);
1265 /* Argument 2 must be 0, 1, 2, or 3. */
1266 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1268 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1269 op2 = const0_rtx;
/* Emit the target's prefetch pattern when it exists and accepts these
   operands; otherwise fall through and only preserve side effects of
   the address expression.  */
1272 #ifdef HAVE_prefetch
1273 if (HAVE_prefetch)
1275 struct expand_operand ops[3];
1277 create_address_operand (&ops[0], op0);
1278 create_integer_operand (&ops[1], INTVAL (op1));
1279 create_integer_operand (&ops[2], INTVAL (op2));
1280 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1281 return;
1283 #endif
1285 /* Don't do anything with direct references to volatile memory, but
1286 generate code to handle other side effects. */
1287 if (!MEM_P (op0) && side_effects_p (op0))
1288 emit_insn (op0);
1291 /* Get a MEM rtx for expression EXP which is the address of an operand
1292 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1293 the maximum length of the block of memory that might be accessed or
1294 NULL if unknown. */
1296 static rtx
1297 get_memory_rtx (tree exp, tree len)
1299 tree orig_exp = exp;
1300 rtx addr, mem;
1302 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1303 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1304 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1305 exp = TREE_OPERAND (exp, 0);
1307 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1308 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1310 /* Get an expression we can use to find the attributes to assign to MEM.
1311 First remove any nops. */
1312 while (CONVERT_EXPR_P (exp)
1313 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1314 exp = TREE_OPERAND (exp, 0);
1316 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1317 (as builtin stringops may alias with anything). */
1318 exp = fold_build2 (MEM_REF,
1319 build_array_type (char_type_node,
1320 build_range_type (sizetype,
1321 size_one_node, len)),
1322 exp, build_int_cst (ptr_type_node, 0));
1324 /* If the MEM_REF has no acceptable address, try to get the base object
1325 from the original address we got, and build an all-aliasing
1326 unknown-sized access to that one. */
1327 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1328 set_mem_attributes (mem, exp, 0);
1329 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1330 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1331 0))))
1333 exp = build_fold_addr_expr (exp);
1334 exp = fold_build2 (MEM_REF,
1335 build_array_type (char_type_node,
1336 build_range_type (sizetype,
1337 size_zero_node,
1338 NULL)),
1339 exp, build_int_cst (ptr_type_node, 0));
1340 set_mem_attributes (mem, exp, 0);
/* Alias set 0 is the wildcard set: string builtins may legitimately
   access the block through any effective type.  */
1342 set_mem_alias_set (mem, 0);
1343 return mem;
1346 /* Built-in functions to perform an untyped call and return. */
/* Per-target arrays caching the machine mode used to save/restore each
   hard register for __builtin_apply_args / __builtin_apply; entries are
   filled in by apply_args_size / apply_result_size below, with VOIDmode
   marking registers that are not saved.  */
1348 #define apply_args_mode \
1349 (this_target_builtins->x_apply_args_mode)
1350 #define apply_result_mode \
1351 (this_target_builtins->x_apply_result_mode)
1353 /* Return the size required for the block returned by __builtin_apply_args,
1354 and initialize apply_args_mode. */
1356 static int
1357 apply_args_size (void)
1359 static int size = -1;
1360 int align;
1361 unsigned int regno;
1362 machine_mode mode;
1364 /* The values computed by this function never change. */
1365 if (size < 0)
1367 /* The first value is the incoming arg-pointer. */
1368 size = GET_MODE_SIZE (Pmode);
1370 /* The second value is the structure value address unless this is
1371 passed as an "invisible" first argument. */
1372 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1373 size += GET_MODE_SIZE (Pmode);
1375 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1376 if (FUNCTION_ARG_REGNO_P (regno))
1378 mode = targetm.calls.get_raw_arg_mode (regno);
1380 gcc_assert (mode != VOIDmode);
/* Round SIZE up to this register's slot alignment before laying it out,
   then record the mode so the save/restore loops use the same layout.  */
1382 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1383 if (size % align != 0)
1384 size = CEIL (size, align) * align;
1385 size += GET_MODE_SIZE (mode);
1386 apply_args_mode[regno] = mode;
1388 else
/* VOIDmode marks registers never used for argument passing.  */
1390 apply_args_mode[regno] = VOIDmode;
1393 return size;
1396 /* Return the size required for the block returned by __builtin_apply,
1397 and initialize apply_result_mode. */
1399 static int
1400 apply_result_size (void)
1402 static int size = -1;
1403 int align, regno;
1404 machine_mode mode;
1406 /* The values computed by this function never change. */
1407 if (size < 0)
1409 size = 0;
1411 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1412 if (targetm.calls.function_value_regno_p (regno))
1414 mode = targetm.calls.get_raw_result_mode (regno);
1416 gcc_assert (mode != VOIDmode);
1418 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1419 if (size % align != 0)
1420 size = CEIL (size, align) * align;
1421 size += GET_MODE_SIZE (mode);
1422 apply_result_mode[regno] = mode;
1424 else
/* VOIDmode marks registers that never carry a function value.  */
1425 apply_result_mode[regno] = VOIDmode;
1427 /* Allow targets that use untyped_call and untyped_return to override
1428 the size so that machine-specific information can be stored here. */
1429 #ifdef APPLY_RESULT_SIZE
1430 size = APPLY_RESULT_SIZE;
1431 #endif
1433 return size;
1436 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1437 /* Create a vector describing the result block RESULT. If SAVEP is true,
1438 the result block is used to save the values; otherwise it is used to
1439 restore the values. */
1441 static rtx
1442 result_vector (int savep, rtx result)
1444 int regno, size, align, nelts;
1445 machine_mode mode;
1446 rtx reg, mem;
1447 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1449 size = nelts = 0;
/* Lay the registers out at the same offsets/alignments that
   apply_result_size computed when filling apply_result_mode.  */
1450 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1451 if ((mode = apply_result_mode[regno]) != VOIDmode)
1453 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1454 if (size % align != 0)
1455 size = CEIL (size, align) * align;
1456 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1457 mem = adjust_address (result, mode, size);
1458 savevec[nelts++] = (savep
1459 ? gen_rtx_SET (VOIDmode, mem, reg)
1460 : gen_rtx_SET (VOIDmode, reg, mem));
1461 size += GET_MODE_SIZE (mode);
1463 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1465 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1467 /* Save the state required to perform an untyped call with the same
1468 arguments as were passed to the current function. */
1470 static rtx
1471 expand_builtin_apply_args_1 (void)
1473 rtx registers, tem;
1474 int size, align, regno;
1475 machine_mode mode;
1476 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1478 /* Create a block where the arg-pointer, structure value address,
1479 and argument registers can be saved. */
1480 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1482 /* Walk past the arg-pointer and structure value address. */
1483 size = GET_MODE_SIZE (Pmode);
1484 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1485 size += GET_MODE_SIZE (Pmode);
1487 /* Save each register used in calling a function to the block. */
1488 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1489 if ((mode = apply_args_mode[regno]) != VOIDmode)
1491 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1492 if (size % align != 0)
1493 size = CEIL (size, align) * align;
1495 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1497 emit_move_insn (adjust_address (registers, mode, size), tem);
1498 size += GET_MODE_SIZE (mode);
1501 /* Save the arg pointer to the block. */
1502 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1503 #ifdef STACK_GROWS_DOWNWARD
1504 /* We need the pointer as the caller actually passed them to us, not
1505 as we might have pretended they were passed. Make sure it's a valid
1506 operand, as emit_move_insn isn't expected to handle a PLUS. */
/* NOTE(review): the assignment target "tem" of the following statement
   is on an elided line of the original source — verify against trunk.  */
1508 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1509 NULL_RTX);
1510 #endif
1511 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1513 size = GET_MODE_SIZE (Pmode);
1515 /* Save the structure value address unless this is passed as an
1516 "invisible" first argument. */
1517 if (struct_incoming_value)
1519 emit_move_insn (adjust_address (registers, Pmode, size),
1520 copy_to_reg (struct_incoming_value));
1521 size += GET_MODE_SIZE (Pmode);
1524 /* Return the address of the block. */
1525 return copy_addr_to_reg (XEXP (registers, 0));
1528 /* __builtin_apply_args returns block of memory allocated on
1529 the stack into which is stored the arg pointer, structure
1530 value address, static chain, and all the registers that might
1531 possibly be used in performing a function call. The code is
1532 moved to the start of the function so the incoming values are
1533 saved. */
1535 static rtx
1536 expand_builtin_apply_args (void)
1538 /* Don't do __builtin_apply_args more than once in a function.
1539 Save the result of the first call and reuse it. */
1540 if (apply_args_value != 0)
1541 return apply_args_value;
1543 /* When this function is called, it means that registers must be
1544 saved on entry to this function. So we migrate the
1545 call to the first insn of this function. */
1546 rtx temp;
1547 rtx seq;
/* Expand the save code in its own sequence so it can be relocated to
   the start of the function below.  */
1549 start_sequence ();
1550 temp = expand_builtin_apply_args_1 ();
1551 seq = get_insns ();
1552 end_sequence ();
1554 apply_args_value = temp;
1556 /* Put the insns after the NOTE that starts the function.
1557 If this is inside a start_sequence, make the outer-level insn
1558 chain current, so the code is placed at the start of the
1559 function. If internal_arg_pointer is a non-virtual pseudo,
1560 it needs to be placed after the function that initializes
1561 that pseudo. */
1562 push_topmost_sequence ();
1563 if (REG_P (crtl->args.internal_arg_pointer)
1564 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1565 emit_insn_before (seq, parm_birth_insn);
1566 else
1567 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1568 pop_topmost_sequence ();
1569 return temp;
1573 /* Perform an untyped call and save the state required to perform an
1574 untyped return of whatever value was returned by the given function. */
1576 static rtx
1577 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1579 int size, align, regno;
1580 machine_mode mode;
1581 rtx incoming_args, result, reg, dest, src;
1582 rtx_call_insn *call_insn;
1583 rtx old_stack_level = 0;
1584 rtx call_fusage = 0;
1585 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
/* ARGUMENTS is the block built by __builtin_apply_args; see
   expand_builtin_apply_args_1 for its layout.  */
1587 arguments = convert_memory_address (Pmode, arguments);
1589 /* Create a block where the return registers can be saved. */
1590 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1592 /* Fetch the arg pointer from the ARGUMENTS block. */
1593 incoming_args = gen_reg_rtx (Pmode);
1594 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1595 #ifndef STACK_GROWS_DOWNWARD
1596 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1597 incoming_args, 0, OPTAB_LIB_WIDEN);
1598 #endif
1600 /* Push a new argument block and copy the arguments. Do not allow
1601 the (potential) memcpy call below to interfere with our stack
1602 manipulations. */
1603 do_pending_stack_adjust ();
1604 NO_DEFER_POP;
1606 /* Save the stack with nonlocal if available. */
1607 #ifdef HAVE_save_stack_nonlocal
1608 if (HAVE_save_stack_nonlocal)
1609 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1610 else
1611 #endif
1612 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1614 /* Allocate a block of memory onto the stack and copy the memory
1615 arguments to the outgoing arguments address. We can pass TRUE
1616 as the 4th argument because we just saved the stack pointer
1617 and will restore it right after the call. */
1618 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1620 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1621 may have already set current_function_calls_alloca to true.
1622 current_function_calls_alloca won't be set if argsize is zero,
1623 so we have to guarantee need_drap is true here. */
1624 if (SUPPORTS_STACK_ALIGNMENT)
1625 crtl->need_drap = true;
1627 dest = virtual_outgoing_args_rtx;
1628 #ifndef STACK_GROWS_DOWNWARD
1629 if (CONST_INT_P (argsize))
1630 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1631 else
1632 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1633 #endif
1634 dest = gen_rtx_MEM (BLKmode, dest);
1635 set_mem_align (dest, PARM_BOUNDARY);
1636 src = gen_rtx_MEM (BLKmode, incoming_args);
1637 set_mem_align (src, PARM_BOUNDARY);
1638 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1640 /* Refer to the argument block. */
1641 apply_args_size ();
1642 arguments = gen_rtx_MEM (BLKmode, arguments);
1643 set_mem_align (arguments, PARM_BOUNDARY);
1645 /* Walk past the arg-pointer and structure value address. */
1646 size = GET_MODE_SIZE (Pmode);
1647 if (struct_value)
1648 size += GET_MODE_SIZE (Pmode);
1650 /* Restore each of the registers previously saved. Make USE insns
1651 for each of these registers for use in making the call. */
1652 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1653 if ((mode = apply_args_mode[regno]) != VOIDmode)
1655 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1656 if (size % align != 0)
1657 size = CEIL (size, align) * align;
1658 reg = gen_rtx_REG (mode, regno);
1659 emit_move_insn (reg, adjust_address (arguments, mode, size));
1660 use_reg (&call_fusage, reg);
1661 size += GET_MODE_SIZE (mode);
1664 /* Restore the structure value address unless this is passed as an
1665 "invisible" first argument. */
1666 size = GET_MODE_SIZE (Pmode);
1667 if (struct_value)
1669 rtx value = gen_reg_rtx (Pmode);
1670 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1671 emit_move_insn (struct_value, value);
1672 if (REG_P (struct_value))
1673 use_reg (&call_fusage, struct_value);
1674 size += GET_MODE_SIZE (Pmode);
1677 /* All arguments and registers used for the call are set up by now! */
1678 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1680 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1681 and we don't want to load it into a register as an optimization,
1682 because prepare_call_address already did it if it should be done. */
1683 if (GET_CODE (function) != SYMBOL_REF)
1684 function = memory_address (FUNCTION_MODE, function);
1686 /* Generate the actual call instruction and save the return value. */
1687 #ifdef HAVE_untyped_call
1688 if (HAVE_untyped_call)
1689 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1690 result, result_vector (1, result)));
1691 else
1692 #endif
1693 #ifdef HAVE_call_value
1694 if (HAVE_call_value)
1696 rtx valreg = 0;
1698 /* Locate the unique return register. It is not possible to
1699 express a call that sets more than one return register using
1700 call_value; use untyped_call for that. In fact, untyped_call
1701 only needs to save the return registers in the given block. */
1702 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1703 if ((mode = apply_result_mode[regno]) != VOIDmode)
1705 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1707 valreg = gen_rtx_REG (mode, regno);
1710 emit_call_insn (GEN_CALL_VALUE (valreg,
1711 gen_rtx_MEM (FUNCTION_MODE, function),
1712 const0_rtx, NULL_RTX, const0_rtx));
1714 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1716 else
1717 #endif
1718 gcc_unreachable ();
1720 /* Find the CALL insn we just emitted, and attach the register usage
1721 information. */
1722 call_insn = last_call_insn ();
1723 add_function_usage_to (call_insn, call_fusage);
1725 /* Restore the stack. */
1726 #ifdef HAVE_save_stack_nonlocal
1727 if (HAVE_save_stack_nonlocal)
1728 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1729 else
1730 #endif
1731 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1732 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1734 OK_DEFER_POP;
1736 /* Return the address of the result block. */
1737 result = copy_addr_to_reg (XEXP (result, 0));
1738 return convert_memory_address (ptr_mode, result);
1741 /* Perform an untyped return. */
1743 static void
1744 expand_builtin_return (rtx result)
1746 int size, align, regno;
1747 machine_mode mode;
1748 rtx reg;
1749 rtx_insn *call_fusage = 0;
1751 result = convert_memory_address (Pmode, result);
/* Called for its side effect of initializing apply_result_mode.  */
1753 apply_result_size ();
1754 result = gen_rtx_MEM (BLKmode, result);
1756 #ifdef HAVE_untyped_return
1757 if (HAVE_untyped_return)
1759 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1760 emit_barrier ();
1761 return;
1763 #endif
1765 /* Restore the return value and note that each value is used. */
1766 size = 0;
1767 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1768 if ((mode = apply_result_mode[regno]) != VOIDmode)
1770 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1771 if (size % align != 0)
1772 size = CEIL (size, align) * align;
1773 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1774 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate the USE insns in a side sequence so they can all be
   emitted just before the return below.  */
1776 push_to_sequence (call_fusage);
1777 emit_use (reg);
1778 call_fusage = get_insns ();
1779 end_sequence ();
1780 size += GET_MODE_SIZE (mode);
1783 /* Put the USE insns before the return. */
1784 emit_insn (call_fusage);
1786 /* Return whatever values was restored by jumping directly to the end
1787 of the function. */
1788 expand_naked_return ();
1791 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1793 static enum type_class
1794 type_to_class (tree type)
1796 switch (TREE_CODE (type))
1798 case VOID_TYPE: return void_type_class;
1799 case INTEGER_TYPE: return integer_type_class;
1800 case ENUMERAL_TYPE: return enumeral_type_class;
1801 case BOOLEAN_TYPE: return boolean_type_class;
1802 case POINTER_TYPE: return pointer_type_class;
1803 case REFERENCE_TYPE: return reference_type_class;
1804 case OFFSET_TYPE: return offset_type_class;
1805 case REAL_TYPE: return real_type_class;
1806 case COMPLEX_TYPE: return complex_type_class;
1807 case FUNCTION_TYPE: return function_type_class;
1808 case METHOD_TYPE: return method_type_class;
1809 case RECORD_TYPE: return record_type_class;
1810 case UNION_TYPE:
1811 case QUAL_UNION_TYPE: return union_type_class;
1812 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1813 ? string_type_class : array_type_class);
1814 case LANG_TYPE: return lang_type_class;
1815 default: return no_type_class;
1819 /* Expand a call EXP to __builtin_classify_type. */
1821 static rtx
1822 expand_builtin_classify_type (tree exp)
1824 if (call_expr_nargs (exp))
1825 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1826 return GEN_INT (no_type_class);
1829 /* This helper macro, meant to be used in mathfn_built_in below,
1830 determines which among a set of three builtin math functions is
1831 appropriate for a given type mode. The `F' and `L' cases are
1832 automatically generated from the `double' case. */
/* Expands to three case labels and stores the double/float/long-double
   variants into the locals fcode/fcodef/fcodel declared by the
   enclosing switch in mathfn_built_in_1.  */
1833 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1834 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1835 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1836 fcodel = BUILT_IN_MATHFN##L ; break;
1837 /* Similar to above, but appends _R after any F/L suffix. */
1838 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1839 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1840 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1841 fcodel = BUILT_IN_MATHFN##L_R ; break;
/* Return mathematic function equivalent to FN but operating directly on TYPE,
   if available.  If IMPLICIT is true use the implicit builtin declaration,
   otherwise use the explicit declaration.  If we can't do the conversion,
   return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  /* Map FN (any of the double/float/long-double variants) to the full
     triple of function codes via the CASE_MATHFN macros above; each arm
     sets fcode/fcodef/fcodel.  */
  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_ICEIL)
      CASE_MATHFN (BUILT_IN_IFLOOR)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_IRINT)
      CASE_MATHFN (BUILT_IN_IROUND)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;
    }

  /* Select the variant matching TYPE's main variant; any other type
     (e.g. decimal float or __float128 here) has no equivalent.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}
1962 /* Like mathfn_built_in_1(), but always use the implicit array. */
1964 tree
1965 mathfn_built_in (tree type, enum built_in_function fn)
1967 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx_code_label *lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  (x == x is false only
     for NaN, so jump over the errno store in the common case.)  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      /* Fallback: assume errno is an ordinary word-mode global symbol.  */
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
		      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  bool errno_set = false;
  bool try_widening = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Pick the optab and decide whether an errno check is needed for
     this particular builtin.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt only sets errno for negative arguments, so skip the check
	 when the argument is provably non-negative.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      try_widening = true;
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available, but try
     to widen the mode for specific operations.  */
  if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
       || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, result);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, result;
  rtx_insn *insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  machine_mode mode;
  bool errno_set = true;

  /* ldexp/scalbn/scalbln take an integer second argument; everything
     else takes two reals.  The fall-through into `default' is
     intentional (it just breaks out of the switch).  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* Fall through.  */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb is only expandable when the radix is 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* The inline expansion plus the errno check would be larger than the
     library call, so punt when optimizing for size.  */
  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_binop (mode, builtin_optab, op0, op1,
			 result, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, result);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  /* Only fma is handled here; the switch keeps the shape of its
     binary/unary siblings for consistency.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Prefer the combined sincos pattern; the unwanted half of the
     result is simply discarded below.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  /* sincos produces two values; route RESULT to the sin or cos
	     output slot and drop the other one.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2425 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2426 return an RTL instruction code that implements the functionality.
2427 If that isn't possible or available return CODE_FOR_nothing. */
2429 static enum insn_code
2430 interclass_mathfn_icode (tree arg, tree fndecl)
2432 bool errno_set = false;
2433 optab builtin_optab = unknown_optab;
2434 machine_mode mode;
2436 switch (DECL_FUNCTION_CODE (fndecl))
2438 CASE_FLT_FN (BUILT_IN_ILOGB):
2439 errno_set = true; builtin_optab = ilogb_optab; break;
2440 CASE_FLT_FN (BUILT_IN_ISINF):
2441 builtin_optab = isinf_optab; break;
2442 case BUILT_IN_ISNORMAL:
2443 case BUILT_IN_ISFINITE:
2444 CASE_FLT_FN (BUILT_IN_FINITE):
2445 case BUILT_IN_FINITED32:
2446 case BUILT_IN_FINITED64:
2447 case BUILT_IN_FINITED128:
2448 case BUILT_IN_ISINFD32:
2449 case BUILT_IN_ISINFD64:
2450 case BUILT_IN_ISINFD128:
2451 /* These builtins have no optabs (yet). */
2452 break;
2453 default:
2454 gcc_unreachable ();
2457 /* There's no easy way to detect the case we need to set EDOM. */
2458 if (flag_errno_math && errno_set)
2459 return CODE_FOR_nothing;
2461 /* Optab mode depends on the mode of the input argument. */
2462 mode = TYPE_MODE (TREE_TYPE (arg));
2464 if (builtin_optab)
2465 return optab_handler (builtin_optab, mode);
2466 return CODE_FOR_nothing;
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      /* Remember the unsaved argument so it can be restored if the
	 insn emission fails and we fall back to a library call.  */
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Emission failed: discard the partial sequence and undo the
	 argument stabilization before punting to a normal call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs through the sin/cos output pointers so that aliasing
     is handled correctly.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  /* The optab handler was verified above, so expansion must succeed.  */
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant matching this cexpi's precision.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      /* Stack temporaries receive the sin and cos results through
	 pointer arguments.  */
      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(x) == cexp(0 + x*i).  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2683 /* Conveniently construct a function call expression. FNDECL names the
2684 function to be called, N is the number of arguments, and the "..."
2685 parameters are the argument expressions. Unlike build_call_exr
2686 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2688 static tree
2689 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2691 va_list ap;
2692 tree fntype = TREE_TYPE (fndecl);
2693 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2695 va_start (ap, n);
2696 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2697 va_end (ap);
2698 SET_EXPR_LOCATION (fn, loc);
2699 return fn;
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  /* These builtins are generated by the compiler itself, so the
     argument list is known to be well-formed.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more the once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Derive the libm function name from the precision of the
	 original builtin.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  /* These builtins are generated by the compiler itself, so the
     argument list is known to be well-formed.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
						fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      /* The lround/lrint result is a long; narrow or widen it to the
	 mode the caller expects.  */
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
2942 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2943 a normal call should be emitted rather than expanding the function
2944 in-line. EXP is the expression that is a call to the builtin
2945 function; if convenient, the result should be placed in TARGET. */
2947 static rtx
2948 expand_builtin_powi (tree exp, rtx target)
2950 tree arg0, arg1;
2951 rtx op0, op1;
2952 machine_mode mode;
2953 machine_mode mode2;
2955 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2956 return NULL_RTX;
2958 arg0 = CALL_EXPR_ARG (exp, 0);
2959 arg1 = CALL_EXPR_ARG (exp, 1);
2960 mode = TYPE_MODE (TREE_TYPE (exp));
2962 /* Emit a libcall to libgcc. */
2964 /* Mode of the 2nd argument must match that of an int. */
2965 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2967 if (target == NULL_RTX)
2968 target = gen_reg_rtx (mode);
2970 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2971 if (GET_MODE (op0) != mode)
2972 op0 = convert_to_mode (mode, op0, 0);
2973 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2974 if (GET_MODE (op1) != mode2)
2975 op1 = convert_to_mode (mode2, op1, 0);
2977 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2978 target, LCT_CONST, mode, 2,
2979 op0, mode, op1, mode2);
2981 return target;
2984 /* Expand expression EXP which is a call to the strlen builtin. Return
2985 NULL_RTX if we failed the caller should emit a normal call, otherwise
2986 try to get the result in TARGET, if convenient. */
2988 static rtx
2989 expand_builtin_strlen (tree exp, rtx target,
2990 machine_mode target_mode)
2992 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2993 return NULL_RTX;
2994 else
2996 struct expand_operand ops[4];
2997 rtx pat;
2998 tree len;
2999 tree src = CALL_EXPR_ARG (exp, 0);
3000 rtx src_reg;
3001 rtx_insn *before_strlen;
3002 machine_mode insn_mode = target_mode;
3003 enum insn_code icode = CODE_FOR_nothing;
3004 unsigned int align;
3006 /* If the length can be computed at compile-time, return it. */
3007 len = c_strlen (src, 0);
3008 if (len)
3009 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3011 /* If the length can be computed at compile-time and is constant
3012 integer, but there are side-effects in src, evaluate
3013 src for side-effects, then return len.
3014 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3015 can be optimized into: i++; x = 3; */
3016 len = c_strlen (src, 1);
3017 if (len && TREE_CODE (len) == INTEGER_CST)
3019 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3020 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3023 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3025 /* If SRC is not a pointer type, don't do this operation inline. */
3026 if (align == 0)
3027 return NULL_RTX;
3029 /* Bail out if we can't compute strlen in the right mode. */
3030 while (insn_mode != VOIDmode)
3032 icode = optab_handler (strlen_optab, insn_mode);
3033 if (icode != CODE_FOR_nothing)
3034 break;
3036 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3038 if (insn_mode == VOIDmode)
3039 return NULL_RTX;
3041 /* Make a place to hold the source address. We will not expand
3042 the actual source until we are sure that the expansion will
3043 not fail -- there are trees that cannot be expanded twice. */
3044 src_reg = gen_reg_rtx (Pmode);
3046 /* Mark the beginning of the strlen sequence so we can emit the
3047 source operand later. */
3048 before_strlen = get_last_insn ();
3050 create_output_operand (&ops[0], target, insn_mode);
3051 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3052 create_integer_operand (&ops[2], 0);
3053 create_integer_operand (&ops[3], align);
3054 if (!maybe_expand_insn (icode, 4, ops))
3055 return NULL_RTX;
3057 /* Now that we are assured of success, expand the source. */
3058 start_sequence ();
3059 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3060 if (pat != src_reg)
3062 #ifdef POINTERS_EXTEND_UNSIGNED
3063 if (GET_MODE (pat) != Pmode)
3064 pat = convert_to_mode (Pmode, pat,
3065 POINTERS_EXTEND_UNSIGNED);
3066 #endif
3067 emit_move_insn (src_reg, pat);
3069 pat = get_insns ();
3070 end_sequence ();
3072 if (before_strlen)
3073 emit_insn_after (pat, before_strlen);
3074 else
3075 emit_insn_before (pat, get_insns ());
3077 /* Return the value in the proper mode for this function. */
3078 if (GET_MODE (ops[0].value) == target_mode)
3079 target = ops[0].value;
3080 else if (target != 0)
3081 convert_move (target, ops[0].value, 0);
3082 else
3083 target = convert_to_mode (target_mode, ops[0].value, 0);
3085 return target;
3089 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3090 bytes from constant string DATA + OFFSET and return it as target
3091 constant. */
3093 static rtx
3094 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3095 machine_mode mode)
3097 const char *str = (const char *) data;
3099 gcc_assert (offset >= 0
3100 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3101 <= strlen (str) + 1));
3103 return c_readstr (str + offset, mode);
3106 /* LEN specify length of the block of memcpy/memset operation.
3107 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3108 In some cases we can make very likely guess on max size, then we
3109 set it into PROBABLE_MAX_SIZE. */
3111 static void
3112 determine_block_size (tree len, rtx len_rtx,
3113 unsigned HOST_WIDE_INT *min_size,
3114 unsigned HOST_WIDE_INT *max_size,
3115 unsigned HOST_WIDE_INT *probable_max_size)
3117 if (CONST_INT_P (len_rtx))
3119 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3120 return;
3122 else
3124 wide_int min, max;
3125 enum value_range_type range_type = VR_UNDEFINED;
3127 /* Determine bounds from the type. */
3128 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3129 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3130 else
3131 *min_size = 0;
3132 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3133 *probable_max_size = *max_size
3134 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3135 else
3136 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3138 if (TREE_CODE (len) == SSA_NAME)
3139 range_type = get_range_info (len, &min, &max);
3140 if (range_type == VR_RANGE)
3142 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3143 *min_size = min.to_uhwi ();
3144 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3145 *probable_max_size = *max_size = max.to_uhwi ();
3147 else if (range_type == VR_ANTI_RANGE)
3149 /* Anti range 0...N lets us to determine minimal size to N+1. */
3150 if (min == 0)
3152 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3153 *min_size = max.to_uhwi () + 1;
3155 /* Code like
3157 int n;
3158 if (n < 100)
3159 memcpy (a, b, n)
3161 Produce anti range allowing negative values of N. We still
3162 can use the information and make a guess that N is not negative.
3164 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3165 *probable_max_size = min.to_uhwi () - 1;
3168 gcc_checking_assert (*max_size <=
3169 (unsigned HOST_WIDE_INT)
3170 GET_MODE_MASK (GET_MODE (len_rtx)));
3173 /* Expand a call EXP to the memcpy builtin.
3174 Return NULL_RTX if we failed, the caller should emit a normal call,
3175 otherwise try to get the result in TARGET, if convenient (and in
3176 mode MODE if that's convenient). */
3178 static rtx
3179 expand_builtin_memcpy (tree exp, rtx target)
3181 if (!validate_arglist (exp,
3182 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3183 return NULL_RTX;
3184 else
3186 tree dest = CALL_EXPR_ARG (exp, 0);
3187 tree src = CALL_EXPR_ARG (exp, 1);
3188 tree len = CALL_EXPR_ARG (exp, 2);
3189 const char *src_str;
3190 unsigned int src_align = get_pointer_alignment (src);
3191 unsigned int dest_align = get_pointer_alignment (dest);
3192 rtx dest_mem, src_mem, dest_addr, len_rtx;
3193 HOST_WIDE_INT expected_size = -1;
3194 unsigned int expected_align = 0;
3195 unsigned HOST_WIDE_INT min_size;
3196 unsigned HOST_WIDE_INT max_size;
3197 unsigned HOST_WIDE_INT probable_max_size;
3199 /* If DEST is not a pointer type, call the normal function. */
3200 if (dest_align == 0)
3201 return NULL_RTX;
3203 /* If either SRC is not a pointer type, don't do this
3204 operation in-line. */
3205 if (src_align == 0)
3206 return NULL_RTX;
3208 if (currently_expanding_gimple_stmt)
3209 stringop_block_profile (currently_expanding_gimple_stmt,
3210 &expected_align, &expected_size);
3212 if (expected_align < dest_align)
3213 expected_align = dest_align;
3214 dest_mem = get_memory_rtx (dest, len);
3215 set_mem_align (dest_mem, dest_align);
3216 len_rtx = expand_normal (len);
3217 determine_block_size (len, len_rtx, &min_size, &max_size,
3218 &probable_max_size);
3219 src_str = c_getstr (src);
3221 /* If SRC is a string constant and block move would be done
3222 by pieces, we can avoid loading the string from memory
3223 and only stored the computed constants. */
3224 if (src_str
3225 && CONST_INT_P (len_rtx)
3226 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3227 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3228 CONST_CAST (char *, src_str),
3229 dest_align, false))
3231 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3232 builtin_memcpy_read_str,
3233 CONST_CAST (char *, src_str),
3234 dest_align, false, 0);
3235 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3236 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3237 return dest_mem;
3240 src_mem = get_memory_rtx (src, len);
3241 set_mem_align (src_mem, src_align);
3243 /* Copy word part most expediently. */
3244 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3245 CALL_EXPR_TAILCALL (exp)
3246 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3247 expected_align, expected_size,
3248 min_size, max_size, probable_max_size);
3250 if (dest_addr == 0)
3252 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3253 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3255 return dest_addr;
3259 /* Expand a call EXP to the mempcpy builtin.
3260 Return NULL_RTX if we failed; the caller should emit a normal call,
3261 otherwise try to get the result in TARGET, if convenient (and in
3262 mode MODE if that's convenient). If ENDP is 0 return the
3263 destination pointer, if ENDP is 1 return the end pointer ala
3264 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3265 stpcpy. */
3267 static rtx
3268 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3270 if (!validate_arglist (exp,
3271 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3272 return NULL_RTX;
3273 else
3275 tree dest = CALL_EXPR_ARG (exp, 0);
3276 tree src = CALL_EXPR_ARG (exp, 1);
3277 tree len = CALL_EXPR_ARG (exp, 2);
3278 return expand_builtin_mempcpy_args (dest, src, len,
3279 target, mode, /*endp=*/ 1);
3283 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3284 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3285 so that this can also be called without constructing an actual CALL_EXPR.
3286 The other arguments and return value are the same as for
3287 expand_builtin_mempcpy. */
3289 static rtx
3290 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3291 rtx target, machine_mode mode, int endp)
3293 /* If return value is ignored, transform mempcpy into memcpy. */
3294 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3296 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3297 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3298 dest, src, len);
3299 return expand_expr (result, target, mode, EXPAND_NORMAL);
3301 else
3303 const char *src_str;
3304 unsigned int src_align = get_pointer_alignment (src);
3305 unsigned int dest_align = get_pointer_alignment (dest);
3306 rtx dest_mem, src_mem, len_rtx;
3308 /* If either SRC or DEST is not a pointer type, don't do this
3309 operation in-line. */
3310 if (dest_align == 0 || src_align == 0)
3311 return NULL_RTX;
3313 /* If LEN is not constant, call the normal function. */
3314 if (! tree_fits_uhwi_p (len))
3315 return NULL_RTX;
3317 len_rtx = expand_normal (len);
3318 src_str = c_getstr (src);
3320 /* If SRC is a string constant and block move would be done
3321 by pieces, we can avoid loading the string from memory
3322 and only stored the computed constants. */
3323 if (src_str
3324 && CONST_INT_P (len_rtx)
3325 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3326 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3327 CONST_CAST (char *, src_str),
3328 dest_align, false))
3330 dest_mem = get_memory_rtx (dest, len);
3331 set_mem_align (dest_mem, dest_align);
3332 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3333 builtin_memcpy_read_str,
3334 CONST_CAST (char *, src_str),
3335 dest_align, false, endp);
3336 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3337 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3338 return dest_mem;
3341 if (CONST_INT_P (len_rtx)
3342 && can_move_by_pieces (INTVAL (len_rtx),
3343 MIN (dest_align, src_align)))
3345 dest_mem = get_memory_rtx (dest, len);
3346 set_mem_align (dest_mem, dest_align);
3347 src_mem = get_memory_rtx (src, len);
3348 set_mem_align (src_mem, src_align);
3349 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3350 MIN (dest_align, src_align), endp);
3351 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3352 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3353 return dest_mem;
3356 return NULL_RTX;
3360 #ifndef HAVE_movstr
3361 # define HAVE_movstr 0
3362 # define CODE_FOR_movstr CODE_FOR_nothing
3363 #endif
3365 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3366 we failed, the caller should emit a normal call, otherwise try to
3367 get the result in TARGET, if convenient. If ENDP is 0 return the
3368 destination pointer, if ENDP is 1 return the end pointer ala
3369 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3370 stpcpy. */
3372 static rtx
3373 expand_movstr (tree dest, tree src, rtx target, int endp)
3375 struct expand_operand ops[3];
3376 rtx dest_mem;
3377 rtx src_mem;
3379 if (!HAVE_movstr)
3380 return NULL_RTX;
3382 dest_mem = get_memory_rtx (dest, NULL);
3383 src_mem = get_memory_rtx (src, NULL);
3384 if (!endp)
3386 target = force_reg (Pmode, XEXP (dest_mem, 0));
3387 dest_mem = replace_equiv_address (dest_mem, target);
3390 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3391 create_fixed_operand (&ops[1], dest_mem);
3392 create_fixed_operand (&ops[2], src_mem);
3393 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3394 return NULL_RTX;
3396 if (endp && target != const0_rtx)
3398 target = ops[0].value;
3399 /* movstr is supposed to set end to the address of the NUL
3400 terminator. If the caller requested a mempcpy-like return value,
3401 adjust it. */
3402 if (endp == 1)
3404 rtx tem = plus_constant (GET_MODE (target),
3405 gen_lowpart (GET_MODE (target), target), 1);
3406 emit_move_insn (target, force_operand (tem, NULL_RTX));
3409 return target;
3412 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3413 NULL_RTX if we failed the caller should emit a normal call, otherwise
3414 try to get the result in TARGET, if convenient (and in mode MODE if that's
3415 convenient). */
3417 static rtx
3418 expand_builtin_strcpy (tree exp, rtx target)
3420 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3422 tree dest = CALL_EXPR_ARG (exp, 0);
3423 tree src = CALL_EXPR_ARG (exp, 1);
3424 return expand_builtin_strcpy_args (dest, src, target);
3426 return NULL_RTX;
3429 /* Helper function to do the actual work for expand_builtin_strcpy. The
3430 arguments to the builtin_strcpy call DEST and SRC are broken out
3431 so that this can also be called without constructing an actual CALL_EXPR.
3432 The other arguments and return value are the same as for
3433 expand_builtin_strcpy. */
3435 static rtx
3436 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3438 return expand_movstr (dest, src, target, /*endp=*/0);
3441 /* Expand a call EXP to the stpcpy builtin.
3442 Return NULL_RTX if we failed the caller should emit a normal call,
3443 otherwise try to get the result in TARGET, if convenient (and in
3444 mode MODE if that's convenient). */
3446 static rtx
3447 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3449 tree dst, src;
3450 location_t loc = EXPR_LOCATION (exp);
3452 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3453 return NULL_RTX;
3455 dst = CALL_EXPR_ARG (exp, 0);
3456 src = CALL_EXPR_ARG (exp, 1);
3458 /* If return value is ignored, transform stpcpy into strcpy. */
3459 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3461 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3462 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3463 return expand_expr (result, target, mode, EXPAND_NORMAL);
3465 else
3467 tree len, lenp1;
3468 rtx ret;
3470 /* Ensure we get an actual string whose length can be evaluated at
3471 compile-time, not an expression containing a string. This is
3472 because the latter will potentially produce pessimized code
3473 when used to produce the return value. */
3474 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3475 return expand_movstr (dst, src, target, /*endp=*/2);
3477 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3478 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3479 target, mode, /*endp=*/2);
3481 if (ret)
3482 return ret;
3484 if (TREE_CODE (len) == INTEGER_CST)
3486 rtx len_rtx = expand_normal (len);
3488 if (CONST_INT_P (len_rtx))
3490 ret = expand_builtin_strcpy_args (dst, src, target);
3492 if (ret)
3494 if (! target)
3496 if (mode != VOIDmode)
3497 target = gen_reg_rtx (mode);
3498 else
3499 target = gen_reg_rtx (GET_MODE (ret));
3501 if (GET_MODE (target) != GET_MODE (ret))
3502 ret = gen_lowpart (GET_MODE (target), ret);
3504 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3505 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3506 gcc_assert (ret);
3508 return target;
3513 return expand_movstr (dst, src, target, /*endp=*/2);
3517 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3518 bytes from constant string DATA + OFFSET and return it as target
3519 constant. */
3522 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3523 machine_mode mode)
3525 const char *str = (const char *) data;
3527 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3528 return const0_rtx;
3530 return c_readstr (str + offset, mode);
3533 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3534 NULL_RTX if we failed the caller should emit a normal call. */
3536 static rtx
3537 expand_builtin_strncpy (tree exp, rtx target)
3539 location_t loc = EXPR_LOCATION (exp);
3541 if (validate_arglist (exp,
3542 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3544 tree dest = CALL_EXPR_ARG (exp, 0);
3545 tree src = CALL_EXPR_ARG (exp, 1);
3546 tree len = CALL_EXPR_ARG (exp, 2);
3547 tree slen = c_strlen (src, 1);
3549 /* We must be passed a constant len and src parameter. */
3550 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3551 return NULL_RTX;
3553 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3555 /* We're required to pad with trailing zeros if the requested
3556 len is greater than strlen(s2)+1. In that case try to
3557 use store_by_pieces, if it fails, punt. */
3558 if (tree_int_cst_lt (slen, len))
3560 unsigned int dest_align = get_pointer_alignment (dest);
3561 const char *p = c_getstr (src);
3562 rtx dest_mem;
3564 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3565 || !can_store_by_pieces (tree_to_uhwi (len),
3566 builtin_strncpy_read_str,
3567 CONST_CAST (char *, p),
3568 dest_align, false))
3569 return NULL_RTX;
3571 dest_mem = get_memory_rtx (dest, len);
3572 store_by_pieces (dest_mem, tree_to_uhwi (len),
3573 builtin_strncpy_read_str,
3574 CONST_CAST (char *, p), dest_align, false, 0);
3575 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3576 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3577 return dest_mem;
3580 return NULL_RTX;
3583 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3584 bytes from constant string DATA + OFFSET and return it as target
3585 constant. */
3588 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3589 machine_mode mode)
3591 const char *c = (const char *) data;
3592 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3594 memset (p, *c, GET_MODE_SIZE (mode));
3596 return c_readstr (p, mode);
3599 /* Callback routine for store_by_pieces. Return the RTL of a register
3600 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3601 char value given in the RTL register data. For example, if mode is
3602 4 bytes wide, return the RTL for 0x01010101*data. */
3604 static rtx
3605 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3606 machine_mode mode)
3608 rtx target, coeff;
3609 size_t size;
3610 char *p;
3612 size = GET_MODE_SIZE (mode);
3613 if (size == 1)
3614 return (rtx) data;
3616 p = XALLOCAVEC (char, size);
3617 memset (p, 1, size);
3618 coeff = c_readstr (p, mode);
3620 target = convert_to_mode (mode, (rtx) data, 1);
3621 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3622 return force_reg (mode, target);
3625 /* Expand expression EXP, which is a call to the memset builtin. Return
3626 NULL_RTX if we failed the caller should emit a normal call, otherwise
3627 try to get the result in TARGET, if convenient (and in mode MODE if that's
3628 convenient). */
3630 static rtx
3631 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3633 if (!validate_arglist (exp,
3634 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3635 return NULL_RTX;
3636 else
3638 tree dest = CALL_EXPR_ARG (exp, 0);
3639 tree val = CALL_EXPR_ARG (exp, 1);
3640 tree len = CALL_EXPR_ARG (exp, 2);
3641 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3645 /* Helper function to do the actual work for expand_builtin_memset. The
3646 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3647 so that this can also be called without constructing an actual CALL_EXPR.
3648 The other arguments and return value are the same as for
3649 expand_builtin_memset. */
3651 static rtx
3652 expand_builtin_memset_args (tree dest, tree val, tree len,
3653 rtx target, machine_mode mode, tree orig_exp)
3655 tree fndecl, fn;
3656 enum built_in_function fcode;
3657 machine_mode val_mode;
3658 char c;
3659 unsigned int dest_align;
3660 rtx dest_mem, dest_addr, len_rtx;
3661 HOST_WIDE_INT expected_size = -1;
3662 unsigned int expected_align = 0;
3663 unsigned HOST_WIDE_INT min_size;
3664 unsigned HOST_WIDE_INT max_size;
3665 unsigned HOST_WIDE_INT probable_max_size;
3667 dest_align = get_pointer_alignment (dest);
3669 /* If DEST is not a pointer type, don't do this operation in-line. */
3670 if (dest_align == 0)
3671 return NULL_RTX;
3673 if (currently_expanding_gimple_stmt)
3674 stringop_block_profile (currently_expanding_gimple_stmt,
3675 &expected_align, &expected_size);
3677 if (expected_align < dest_align)
3678 expected_align = dest_align;
3680 /* If the LEN parameter is zero, return DEST. */
3681 if (integer_zerop (len))
3683 /* Evaluate and ignore VAL in case it has side-effects. */
3684 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3685 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3688 /* Stabilize the arguments in case we fail. */
3689 dest = builtin_save_expr (dest);
3690 val = builtin_save_expr (val);
3691 len = builtin_save_expr (len);
3693 len_rtx = expand_normal (len);
3694 determine_block_size (len, len_rtx, &min_size, &max_size,
3695 &probable_max_size);
3696 dest_mem = get_memory_rtx (dest, len);
3697 val_mode = TYPE_MODE (unsigned_char_type_node);
3699 if (TREE_CODE (val) != INTEGER_CST)
3701 rtx val_rtx;
3703 val_rtx = expand_normal (val);
3704 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3706 /* Assume that we can memset by pieces if we can store
3707 * the coefficients by pieces (in the required modes).
3708 * We can't pass builtin_memset_gen_str as that emits RTL. */
3709 c = 1;
3710 if (tree_fits_uhwi_p (len)
3711 && can_store_by_pieces (tree_to_uhwi (len),
3712 builtin_memset_read_str, &c, dest_align,
3713 true))
3715 val_rtx = force_reg (val_mode, val_rtx);
3716 store_by_pieces (dest_mem, tree_to_uhwi (len),
3717 builtin_memset_gen_str, val_rtx, dest_align,
3718 true, 0);
3720 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3721 dest_align, expected_align,
3722 expected_size, min_size, max_size,
3723 probable_max_size))
3724 goto do_libcall;
3726 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3727 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3728 return dest_mem;
3731 if (target_char_cast (val, &c))
3732 goto do_libcall;
3734 if (c)
3736 if (tree_fits_uhwi_p (len)
3737 && can_store_by_pieces (tree_to_uhwi (len),
3738 builtin_memset_read_str, &c, dest_align,
3739 true))
3740 store_by_pieces (dest_mem, tree_to_uhwi (len),
3741 builtin_memset_read_str, &c, dest_align, true, 0);
3742 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3743 gen_int_mode (c, val_mode),
3744 dest_align, expected_align,
3745 expected_size, min_size, max_size,
3746 probable_max_size))
3747 goto do_libcall;
3749 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3750 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3751 return dest_mem;
3754 set_mem_align (dest_mem, dest_align);
3755 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3756 CALL_EXPR_TAILCALL (orig_exp)
3757 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3758 expected_align, expected_size,
3759 min_size, max_size,
3760 probable_max_size);
3762 if (dest_addr == 0)
3764 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3765 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3768 return dest_addr;
3770 do_libcall:
3771 fndecl = get_callee_fndecl (orig_exp);
3772 fcode = DECL_FUNCTION_CODE (fndecl);
3773 if (fcode == BUILT_IN_MEMSET)
3774 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3775 dest, val, len);
3776 else if (fcode == BUILT_IN_BZERO)
3777 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3778 dest, len);
3779 else
3780 gcc_unreachable ();
3781 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3782 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3783 return expand_call (fn, target, target == const0_rtx);
3786 /* Expand expression EXP, which is a call to the bzero builtin. Return
3787 NULL_RTX if we failed the caller should emit a normal call. */
3789 static rtx
3790 expand_builtin_bzero (tree exp)
3792 tree dest, size;
3793 location_t loc = EXPR_LOCATION (exp);
3795 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3796 return NULL_RTX;
3798 dest = CALL_EXPR_ARG (exp, 0);
3799 size = CALL_EXPR_ARG (exp, 1);
3801 /* New argument list transforming bzero(ptr x, int y) to
3802 memset(ptr x, int 0, size_t y). This is done this way
3803 so that if it isn't expanded inline, we fallback to
3804 calling bzero instead of memset. */
3806 return expand_builtin_memset_args (dest, integer_zero_node,
3807 fold_convert_loc (loc,
3808 size_type_node, size),
3809 const0_rtx, VOIDmode, exp);
3812 /* Expand expression EXP, which is a call to the memcmp built-in function.
3813 Return NULL_RTX if we failed and the caller should emit a normal call,
3814 otherwise try to get the result in TARGET, if convenient (and in mode
3815 MODE, if that's convenient). */
3817 static rtx
3818 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3819 ATTRIBUTE_UNUSED machine_mode mode)
3821 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3823 if (!validate_arglist (exp,
3824 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3825 return NULL_RTX;
3827 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3828 implementing memcmp because it will stop if it encounters two
3829 zero bytes. */
3830 #if defined HAVE_cmpmemsi
3832 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3833 rtx result;
3834 rtx insn;
3835 tree arg1 = CALL_EXPR_ARG (exp, 0);
3836 tree arg2 = CALL_EXPR_ARG (exp, 1);
3837 tree len = CALL_EXPR_ARG (exp, 2);
3839 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3840 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3841 machine_mode insn_mode;
3843 if (HAVE_cmpmemsi)
3844 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3845 else
3846 return NULL_RTX;
3848 /* If we don't have POINTER_TYPE, call the function. */
3849 if (arg1_align == 0 || arg2_align == 0)
3850 return NULL_RTX;
3852 /* Make a place to write the result of the instruction. */
3853 result = target;
3854 if (! (result != 0
3855 && REG_P (result) && GET_MODE (result) == insn_mode
3856 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3857 result = gen_reg_rtx (insn_mode);
3859 arg1_rtx = get_memory_rtx (arg1, len);
3860 arg2_rtx = get_memory_rtx (arg2, len);
3861 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3863 /* Set MEM_SIZE as appropriate. */
3864 if (CONST_INT_P (arg3_rtx))
3866 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3867 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3870 if (HAVE_cmpmemsi)
3871 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3872 GEN_INT (MIN (arg1_align, arg2_align)));
3873 else
3874 gcc_unreachable ();
3876 if (insn)
3877 emit_insn (insn);
3878 else
3879 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3880 TYPE_MODE (integer_type_node), 3,
3881 XEXP (arg1_rtx, 0), Pmode,
3882 XEXP (arg2_rtx, 0), Pmode,
3883 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3884 TYPE_UNSIGNED (sizetype)),
3885 TYPE_MODE (sizetype));
3887 /* Return the value in the proper mode for this function. */
3888 mode = TYPE_MODE (TREE_TYPE (exp));
3889 if (GET_MODE (result) == mode)
3890 return result;
3891 else if (target != 0)
3893 convert_move (target, result, 0);
3894 return target;
3896 else
3897 return convert_to_mode (mode, result, 0);
3899 #endif /* HAVE_cmpmemsi. */
3901 return NULL_RTX;
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  (A zero
	 alignment means get_pointer_alignment could not prove the
	 argument is a pointer.)  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail, so the
	 libcall fallback below does not re-evaluate side effects.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  Reuse
	     TARGET only if it is already a suitable pseudo in the insn's
	     result mode.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* Each known length is extended by one to cover the
	     terminating NUL.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
      machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Each known length is extended by one to cover the terminating
	 NUL.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  (Zero
	 alignment means the argument could not be proven a pointer.)  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  Reuse
	 TARGET only if it is already a suitable pseudo in the insn's
	 result mode.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4204 /* Expand a call to __builtin_next_arg. */
4206 static rtx
4207 expand_builtin_next_arg (void)
4209 /* Checking arguments is already done in fold_builtin_next_arg
4210 that must be called before this function. */
4211 return expand_binop (ptr_mode, add_optab,
4212 crtl->args.internal_arg_pointer,
4213 crtl->args.arg_offset_rtx,
4214 NULL_RTX, 0, OPTAB_LIB_WIDEN);
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the location to use for the
   rewritten trees; NEEDS_LVALUE is nonzero when the caller will write
   through the result.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  /* Read-only use of a side-effect-free valist can be returned
	     unchanged.  */
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Rebuild the access as *(&valist) so repeated uses evaluate the
	 saved address rather than the original expression.  */
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4267 /* The "standard" definition of va_list is void*. */
4269 tree
4270 std_build_builtin_va_list (void)
4272 return ptr_type_node;
4275 /* The "standard" abi va_list is va_list_type_node. */
4277 tree
4278 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4280 return va_list_type_node;
/* The "standard" type of va_list is va_list_type_node.  Return it if
   TYPE matches (possibly through a level of indirection or array
   decay), otherwise NULL_TREE.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  /* Strip one level of indirection from the incoming type.  */
  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
4318 /* The "standard" implementation of va_start: just assign `nextarg' to
4319 the variable. */
4321 void
4322 std_expand_builtin_va_start (tree valist, rtx nextarg)
4324 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4325 convert_move (va_r, nextarg, 0);
/* Expand EXP, a call to __builtin_va_start.  Returns const0_rtx; the
   builtin produces no value.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses invalid second arguments; a nonzero
     return means an error was already reported.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Let the target expand va_start if it provides a hook; otherwise use
     the standard pointer-assignment expansion.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
4357 /* Expand EXP, a call to __builtin_va_end. */
4359 static rtx
4360 expand_builtin_va_end (tree exp)
4362 tree valist = CALL_EXPR_ARG (exp, 0);
4364 /* Evaluate for side effects, if needed. I hate macros that don't
4365 do that. */
4366 if (TREE_SIDE_EFFECTS (valist))
4367 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4369 return const0_rtx;
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Destination needs an lvalue; source is only read.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar va_list: a simple assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array va_list: copy the whole object with a block move.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  FNDECL distinguishes the two; EXP is the
   call expression.  Returns the requested address, or const0_rtx on
   any diagnosed error.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, force the address into a
	 register unless it is already a register or constant.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
   is the same as for allocate_dynamic_stack_space.  */

static rtx
expand_builtin_alloca (tree exp, bool cannot_accumulate)
{
  rtx op0;
  rtx result;
  bool valid_arglist;
  unsigned int align;
  /* __builtin_alloca_with_align takes an extra alignment argument.  */
  bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
			    == BUILT_IN_ALLOCA_WITH_ALIGN);

  valid_arglist
    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
	   ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
	   : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  */
  result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
4508 /* Expand a call to bswap builtin in EXP.
4509 Return NULL_RTX if a normal call should be emitted rather than expanding the
4510 function in-line. If convenient, the result should be placed in TARGET.
4511 SUBTARGET may be used as the target for computing one of EXP's operands. */
4513 static rtx
4514 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4515 rtx subtarget)
4517 tree arg;
4518 rtx op0;
4520 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4521 return NULL_RTX;
4523 arg = CALL_EXPR_ARG (exp, 0);
4524 op0 = expand_expr (arg,
4525 subtarget && GET_MODE (subtarget) == target_mode
4526 ? subtarget : NULL_RTX,
4527 target_mode, EXPAND_NORMAL);
4528 if (GET_MODE (op0) != target_mode)
4529 op0 = convert_to_mode (target_mode, op0, 1);
4531 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4533 gcc_assert (target);
4535 return convert_to_mode (target_mode, target, 1);
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument, reusing SUBTARGET when its mode matches the
     argument's mode.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  The final argument
     tells expand_unop whether the operand is unsigned; clrsb is the
     one signed case here.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
4567 /* Expand a call to __builtin_expect. We just return our argument
4568 as the builtin_expect semantic should've been already executed by
4569 tree branch prediction pass. */
4571 static rtx
4572 expand_builtin_expect (tree exp, rtx target)
4574 tree arg;
4576 if (call_expr_nargs (exp) < 2)
4577 return const0_rtx;
4578 arg = CALL_EXPR_ARG (exp, 0);
4580 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4581 /* When guessing was done, the hints should be already stripped away. */
4582 gcc_assert (!flag_guess_branch_prob
4583 || optimize == 0 || seen_error ());
4584 return target;
4587 /* Expand a call to __builtin_assume_aligned. We just return our first
4588 argument as the builtin_assume_aligned semantic should've been already
4589 executed by CCP. */
4591 static rtx
4592 expand_builtin_assume_aligned (tree exp, rtx target)
4594 if (call_expr_nargs (exp) < 2)
4595 return const0_rtx;
4596 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4597 EXPAND_NORMAL);
4598 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4599 && (call_expr_nargs (exp) < 3
4600 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4601 return target;
/* Expand a trap: use the target's trap insn when it exists, otherwise
   fall back to a call to abort.  Either way control does not continue,
   so a barrier is emitted afterwards.  */

void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    {
      rtx insn = emit_insn (gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  Nothing is emitted except a
   barrier stating that control flow never reaches this point.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Stabilize the argument and write it back into the call so later
     uses of EXP see the saved form.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
4657 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4658 Return NULL is a normal call should be emitted rather than expanding the
4659 function inline. If convenient, the result should be placed in TARGET.
4660 SUBTARGET may be used as the target for computing the operand. */
4662 static rtx
4663 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4665 rtx op0, op1;
4666 tree arg;
4668 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4669 return NULL_RTX;
4671 arg = CALL_EXPR_ARG (exp, 0);
4672 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4674 arg = CALL_EXPR_ARG (exp, 1);
4675 op1 = expand_normal (arg);
4677 return expand_copysign (op0, op1, target);
/* Expand a call to __builtin___clear_cache.  Three configurations:
   no insn and a libgcc fallback (expand to a library call), no insn and
   no fallback (the builtin is a no-op), or a target clear_cache insn.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.
   Returns TRAMP unchanged when the stack boundary already guarantees
   sufficient alignment, otherwise an rtx for the rounded-up address.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary: (tramp + align-1) & -align.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
/* Expand a call to __builtin_init_trampoline (ONSTACK true) or
   __builtin_init_heap_trampoline (ONSTACK false).  EXP carries the
   trampoline address, the nested function's address, and the static
   chain value.  */

static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      /* Record the creation so the link-time machinery can mark the
	 stack executable, and let the user know.  */
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
4811 static rtx
4812 expand_builtin_adjust_trampoline (tree exp)
4814 rtx tramp;
4816 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4817 return NULL_RTX;
4819 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4820 tramp = round_trampoline_addr (tramp);
4821 if (targetm.calls.trampoline_adjust_address)
4822 tramp = targetm.calls.trampoline_adjust_address (tramp);
4824 return tramp;
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The insn could not be emitted; discard any partial output and
	 fall through to the generic bit-extraction path.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The value fits in one word: view it as an integer of the same
	 size.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word value: pick the word that holds the sign bit.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implement with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identificator of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  /* Build a declaration for the wrapper with the same type as the
     original function, then redirect the call to it.  */
  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
5005 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5006 the pointer in these functions is void*, the tree optimizers may remove
5007 casts. The mode computed in expand_builtin isn't reliable either, due
5008 to __sync_bool_compare_and_swap.
5010 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5011 group of builtins. This gives us log2 of the mode size. */
5013 static inline machine_mode
5014 get_builtin_sync_mode (int fcode_diff)
5016 /* The size is not negotiable, so ask not to get BLKmode in return
5017 if the target indicates that a smaller size would be better. */
5018 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5021 /* Expand the memory expression LOC and return the appropriate memory operand
5022 for the builtin_sync operations. */
5024 static rtx
5025 get_builtin_sync_mem (tree loc, machine_mode mode)
5027 rtx addr, mem;
5029 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5030 addr = convert_memory_address (Pmode, addr);
5032 /* Note that we explicitly do not want any alias information for this
5033 memory, so that we kill all other live memories. Otherwise we don't
5034 satisfy the full barrier semantics of the intrinsic. */
5035 mem = validize_mem (gen_rtx_MEM (mode, addr));
5037 /* The alignment needs to be at least according to that of the mode. */
5038 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5039 get_pointer_alignment (loc)));
5040 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5041 MEM_VOLATILE_P (mem) = 1;
5043 return mem;
5046 /* Make sure an argument is in the right mode.
5047 EXP is the tree argument.
5048 MODE is the mode it should be in. */
5050 static rtx
5051 expand_expr_force_mode (tree exp, machine_mode mode)
5053 rtx val;
5054 machine_mode old_mode;
5056 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5057 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5058 of CONST_INTs, where we know the old_mode only from the call argument. */
5060 old_mode = GET_MODE (val);
5061 if (old_mode == VOIDmode)
5062 old_mode = TYPE_MODE (TREE_TYPE (exp));
5063 val = convert_modes (mode, old_mode, val, 1);
5064 return val;
5068 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5069 EXP is the CALL_EXPR. CODE is the rtx code
5070 that corresponds to the arithmetic or logical operation from the name;
5071 an exception here is that NOT actually means NAND. TARGET is an optional
5072 place for us to store the results; AFTER is true if this is the
5073 fetch_and_xxx form. */
5075 static rtx
5076 expand_builtin_sync_operation (machine_mode mode, tree exp,
5077 enum rtx_code code, bool after,
5078 rtx target)
5080 rtx val, mem;
5081 location_t loc = EXPR_LOCATION (exp);
5083 if (code == NOT && warn_sync_nand)
5085 tree fndecl = get_callee_fndecl (exp);
5086 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5088 static bool warned_f_a_n, warned_n_a_f;
5090 switch (fcode)
5092 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5093 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5094 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5095 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5096 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5097 if (warned_f_a_n)
5098 break;
5100 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5101 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5102 warned_f_a_n = true;
5103 break;
5105 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5106 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5107 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5108 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5109 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5110 if (warned_n_a_f)
5111 break;
5113 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5114 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5115 warned_n_a_f = true;
5116 break;
5118 default:
5119 gcc_unreachable ();
5123 /* Expand the operands. */
5124 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5125 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5127 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5128 after);
5131 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5132 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5133 true if this is the boolean form. TARGET is a place for us to store the
5134 results; this is NOT optional if IS_BOOL is true. */
5136 static rtx
5137 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5138 bool is_bool, rtx target)
5140 rtx old_val, new_val, mem;
5141 rtx *pbool, *poval;
5143 /* Expand the operands. */
5144 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5145 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5146 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5148 pbool = poval = NULL;
5149 if (target != const0_rtx)
5151 if (is_bool)
5152 pbool = &target;
5153 else
5154 poval = &target;
5156 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5157 false, MEMMODEL_SEQ_CST,
5158 MEMMODEL_SEQ_CST))
5159 return NULL_RTX;
5161 return target;
5164 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5165 general form is actually an atomic exchange, and some targets only
5166 support a reduced form with the second argument being a constant 1.
5167 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5168 the results. */
5170 static rtx
5171 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5172 rtx target)
5174 rtx val, mem;
5176 /* Expand the operands. */
5177 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5178 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5180 return expand_sync_lock_test_and_set (target, mem, val);
5183 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5185 static void
5186 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5188 rtx mem;
5190 /* Expand the operands. */
5191 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5193 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5196 /* Given an integer representing an ``enum memmodel'', verify its
5197 correctness and return the memory model enum. */
5199 static enum memmodel
5200 get_memmodel (tree exp)
5202 rtx op;
5203 unsigned HOST_WIDE_INT val;
5205 /* If the parameter is not a constant, it's a run time value so we'll just
5206 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5207 if (TREE_CODE (exp) != INTEGER_CST)
5208 return MEMMODEL_SEQ_CST;
5210 op = expand_normal (exp);
5212 val = INTVAL (op);
5213 if (targetm.memmodel_check)
5214 val = targetm.memmodel_check (val);
5215 else if (val & ~MEMMODEL_MASK)
5217 warning (OPT_Winvalid_memory_model,
5218 "Unknown architecture specifier in memory model to builtin.");
5219 return MEMMODEL_SEQ_CST;
5222 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5224 warning (OPT_Winvalid_memory_model,
5225 "invalid memory model argument to builtin");
5226 return MEMMODEL_SEQ_CST;
5229 return (enum memmodel) val;
5232 /* Expand the __atomic_exchange intrinsic:
5233 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5234 EXP is the CALL_EXPR.
5235 TARGET is an optional place for us to store the results. */
5237 static rtx
5238 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5240 rtx val, mem;
5241 enum memmodel model;
5243 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5244 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5246 error ("invalid memory model for %<__atomic_exchange%>");
5247 return NULL_RTX;
5250 if (!flag_inline_atomics)
5251 return NULL_RTX;
5253 /* Expand the operands. */
5254 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5255 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5257 return expand_atomic_exchange (target, mem, val, model);
5260 /* Expand the __atomic_compare_exchange intrinsic:
5261 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5262 TYPE desired, BOOL weak,
5263 enum memmodel success,
5264 enum memmodel failure)
5265 EXP is the CALL_EXPR.
5266 TARGET is an optional place for us to store the results. */
5268 static rtx
5269 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5270 rtx target)
5272 rtx expect, desired, mem, oldval;
5273 rtx_code_label *label;
5274 enum memmodel success, failure;
5275 tree weak;
5276 bool is_weak;
5278 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5279 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5281 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5282 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5284 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5285 return NULL_RTX;
5288 if (failure > success)
5290 error ("failure memory model cannot be stronger than success "
5291 "memory model for %<__atomic_compare_exchange%>");
5292 return NULL_RTX;
5295 if (!flag_inline_atomics)
5296 return NULL_RTX;
5298 /* Expand the operands. */
5299 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5301 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5302 expect = convert_memory_address (Pmode, expect);
5303 expect = gen_rtx_MEM (mode, expect);
5304 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5306 weak = CALL_EXPR_ARG (exp, 3);
5307 is_weak = false;
5308 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5309 is_weak = true;
5311 if (target == const0_rtx)
5312 target = NULL;
5314 /* Lest the rtl backend create a race condition with an imporoper store
5315 to memory, always create a new pseudo for OLDVAL. */
5316 oldval = NULL;
5318 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5319 is_weak, success, failure))
5320 return NULL_RTX;
5322 /* Conditionally store back to EXPECT, lest we create a race condition
5323 with an improper store to memory. */
5324 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5325 the normal case where EXPECT is totally private, i.e. a register. At
5326 which point the store can be unconditional. */
5327 label = gen_label_rtx ();
5328 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5329 emit_move_insn (expect, oldval);
5330 emit_label (label);
5332 return target;
5335 /* Expand the __atomic_load intrinsic:
5336 TYPE __atomic_load (TYPE *object, enum memmodel)
5337 EXP is the CALL_EXPR.
5338 TARGET is an optional place for us to store the results. */
5340 static rtx
5341 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5343 rtx mem;
5344 enum memmodel model;
5346 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5347 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5348 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5350 error ("invalid memory model for %<__atomic_load%>");
5351 return NULL_RTX;
5354 if (!flag_inline_atomics)
5355 return NULL_RTX;
5357 /* Expand the operand. */
5358 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5360 return expand_atomic_load (target, mem, model);
5364 /* Expand the __atomic_store intrinsic:
5365 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5366 EXP is the CALL_EXPR.
5367 TARGET is an optional place for us to store the results. */
5369 static rtx
5370 expand_builtin_atomic_store (machine_mode mode, tree exp)
5372 rtx mem, val;
5373 enum memmodel model;
5375 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5376 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5377 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5378 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5380 error ("invalid memory model for %<__atomic_store%>");
5381 return NULL_RTX;
5384 if (!flag_inline_atomics)
5385 return NULL_RTX;
5387 /* Expand the operands. */
5388 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5389 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5391 return expand_atomic_store (mem, val, model, false);
5394 /* Expand the __atomic_fetch_XXX intrinsic:
5395 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5396 EXP is the CALL_EXPR.
5397 TARGET is an optional place for us to store the results.
5398 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5399 FETCH_AFTER is true if returning the result of the operation.
5400 FETCH_AFTER is false if returning the value before the operation.
5401 IGNORE is true if the result is not used.
5402 EXT_CALL is the correct builtin for an external call if this cannot be
5403 resolved to an instruction sequence. */
5405 static rtx
5406 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5407 enum rtx_code code, bool fetch_after,
5408 bool ignore, enum built_in_function ext_call)
5410 rtx val, mem, ret;
5411 enum memmodel model;
5412 tree fndecl;
5413 tree addr;
5415 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5417 /* Expand the operands. */
5418 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5419 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5421 /* Only try generating instructions if inlining is turned on. */
5422 if (flag_inline_atomics)
5424 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5425 if (ret)
5426 return ret;
5429 /* Return if a different routine isn't needed for the library call. */
5430 if (ext_call == BUILT_IN_NONE)
5431 return NULL_RTX;
5433 /* Change the call to the specified function. */
5434 fndecl = get_callee_fndecl (exp);
5435 addr = CALL_EXPR_FN (exp);
5436 STRIP_NOPS (addr);
5438 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5439 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5441 /* Expand the call here so we can emit trailing code. */
5442 ret = expand_call (exp, target, ignore);
5444 /* Replace the original function just in case it matters. */
5445 TREE_OPERAND (addr, 0) = fndecl;
5447 /* Then issue the arithmetic correction to return the right result. */
5448 if (!ignore)
5450 if (code == NOT)
5452 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5453 OPTAB_LIB_WIDEN);
5454 ret = expand_simple_unop (mode, NOT, ret, target, true);
5456 else
5457 ret = expand_simple_binop (mode, code, ret, val, target, true,
5458 OPTAB_LIB_WIDEN);
5460 return ret;
5464 #ifndef HAVE_atomic_clear
5465 # define HAVE_atomic_clear 0
5466 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5467 #endif
5469 /* Expand an atomic clear operation.
5470 void _atomic_clear (BOOL *obj, enum memmodel)
5471 EXP is the call expression. */
5473 static rtx
5474 expand_builtin_atomic_clear (tree exp)
5476 machine_mode mode;
5477 rtx mem, ret;
5478 enum memmodel model;
5480 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5481 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5482 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5484 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5485 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5487 error ("invalid memory model for %<__atomic_store%>");
5488 return const0_rtx;
5491 if (HAVE_atomic_clear)
5493 emit_insn (gen_atomic_clear (mem, model));
5494 return const0_rtx;
5497 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5498 Failing that, a store is issued by __atomic_store. The only way this can
5499 fail is if the bool type is larger than a word size. Unlikely, but
5500 handle it anyway for completeness. Assume a single threaded model since
5501 there is no atomic support in this case, and no barriers are required. */
5502 ret = expand_atomic_store (mem, const0_rtx, model, true);
5503 if (!ret)
5504 emit_move_insn (mem, const0_rtx);
5505 return const0_rtx;
5508 /* Expand an atomic test_and_set operation.
5509 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5510 EXP is the call expression. */
5512 static rtx
5513 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5515 rtx mem;
5516 enum memmodel model;
5517 machine_mode mode;
5519 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5520 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5521 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5523 return expand_atomic_test_and_set (target, mem, model);
5527 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5528 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5530 static tree
5531 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5533 int size;
5534 machine_mode mode;
5535 unsigned int mode_align, type_align;
5537 if (TREE_CODE (arg0) != INTEGER_CST)
5538 return NULL_TREE;
5540 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5541 mode = mode_for_size (size, MODE_INT, 0);
5542 mode_align = GET_MODE_ALIGNMENT (mode);
5544 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5545 type_align = mode_align;
5546 else
5548 tree ttype = TREE_TYPE (arg1);
5550 /* This function is usually invoked and folded immediately by the front
5551 end before anything else has a chance to look at it. The pointer
5552 parameter at this point is usually cast to a void *, so check for that
5553 and look past the cast. */
5554 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5555 && VOID_TYPE_P (TREE_TYPE (ttype)))
5556 arg1 = TREE_OPERAND (arg1, 0);
5558 ttype = TREE_TYPE (arg1);
5559 gcc_assert (POINTER_TYPE_P (ttype));
5561 /* Get the underlying type of the object. */
5562 ttype = TREE_TYPE (ttype);
5563 type_align = TYPE_ALIGN (ttype);
5566 /* If the object has smaller alignment, the the lock free routines cannot
5567 be used. */
5568 if (type_align < mode_align)
5569 return boolean_false_node;
5571 /* Check if a compare_and_swap pattern exists for the mode which represents
5572 the required size. The pattern is not allowed to fail, so the existence
5573 of the pattern indicates support is present. */
5574 if (can_compare_and_swap_p (mode, true))
5575 return boolean_true_node;
5576 else
5577 return boolean_false_node;
5580 /* Return true if the parameters to call EXP represent an object which will
5581 always generate lock free instructions. The first argument represents the
5582 size of the object, and the second parameter is a pointer to the object
5583 itself. If NULL is passed for the object, then the result is based on
5584 typical alignment for an object of the specified size. Otherwise return
5585 false. */
5587 static rtx
5588 expand_builtin_atomic_always_lock_free (tree exp)
5590 tree size;
5591 tree arg0 = CALL_EXPR_ARG (exp, 0);
5592 tree arg1 = CALL_EXPR_ARG (exp, 1);
5594 if (TREE_CODE (arg0) != INTEGER_CST)
5596 error ("non-constant argument 1 to __atomic_always_lock_free");
5597 return const0_rtx;
5600 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5601 if (size == boolean_true_node)
5602 return const1_rtx;
5603 return const0_rtx;
5606 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5607 is lock free on this architecture. */
5609 static tree
5610 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5612 if (!flag_inline_atomics)
5613 return NULL_TREE;
5615 /* If it isn't always lock free, don't generate a result. */
5616 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5617 return boolean_true_node;
5619 return NULL_TREE;
5622 /* Return true if the parameters to call EXP represent an object which will
5623 always generate lock free instructions. The first argument represents the
5624 size of the object, and the second parameter is a pointer to the object
5625 itself. If NULL is passed for the object, then the result is based on
5626 typical alignment for an object of the specified size. Otherwise return
5627 NULL*/
5629 static rtx
5630 expand_builtin_atomic_is_lock_free (tree exp)
5632 tree size;
5633 tree arg0 = CALL_EXPR_ARG (exp, 0);
5634 tree arg1 = CALL_EXPR_ARG (exp, 1);
5636 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5638 error ("non-integer argument 1 to __atomic_is_lock_free");
5639 return NULL_RTX;
5642 if (!flag_inline_atomics)
5643 return NULL_RTX;
5645 /* If the value is known at compile time, return the RTX for it. */
5646 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5647 if (size == boolean_true_node)
5648 return const1_rtx;
5650 return NULL_RTX;
5653 /* Expand the __atomic_thread_fence intrinsic:
5654 void __atomic_thread_fence (enum memmodel)
5655 EXP is the CALL_EXPR. */
5657 static void
5658 expand_builtin_atomic_thread_fence (tree exp)
5660 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5661 expand_mem_thread_fence (model);
5664 /* Expand the __atomic_signal_fence intrinsic:
5665 void __atomic_signal_fence (enum memmodel)
5666 EXP is the CALL_EXPR. */
5668 static void
5669 expand_builtin_atomic_signal_fence (tree exp)
5671 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5672 expand_mem_signal_fence (model);
5675 /* Expand the __sync_synchronize intrinsic. */
5677 static void
5678 expand_builtin_sync_synchronize (void)
5680 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5683 static rtx
5684 expand_builtin_thread_pointer (tree exp, rtx target)
5686 enum insn_code icode;
5687 if (!validate_arglist (exp, VOID_TYPE))
5688 return const0_rtx;
5689 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5690 if (icode != CODE_FOR_nothing)
5692 struct expand_operand op;
5693 /* If the target is not sutitable then create a new target. */
5694 if (target == NULL_RTX
5695 || !REG_P (target)
5696 || GET_MODE (target) != Pmode)
5697 target = gen_reg_rtx (Pmode);
5698 create_output_operand (&op, target, Pmode);
5699 expand_insn (icode, 1, &op);
5700 return target;
5702 error ("__builtin_thread_pointer is not supported on this target");
5703 return const0_rtx;
5706 static void
5707 expand_builtin_set_thread_pointer (tree exp)
5709 enum insn_code icode;
5710 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5711 return;
5712 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5713 if (icode != CODE_FOR_nothing)
5715 struct expand_operand op;
5716 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5717 Pmode, EXPAND_NORMAL);
5718 create_input_operand (&op, val, Pmode);
5719 expand_insn (icode, 1, &op);
5720 return;
5722 error ("__builtin_set_thread_pointer is not supported on this target");
5726 /* Emit code to restore the current value of stack. */
5728 static void
5729 expand_stack_restore (tree var)
5731 rtx_insn *prev;
5732 rtx sa = expand_normal (var);
5734 sa = convert_memory_address (Pmode, sa);
5736 prev = get_last_insn ();
5737 emit_stack_restore (SAVE_BLOCK, sa);
5738 fixup_args_size_notes (prev, get_last_insn (), 0);
5742 /* Emit code to save the current value of stack. */
5744 static rtx
5745 expand_stack_save (void)
5747 rtx ret = NULL_RTX;
5749 do_pending_stack_adjust ();
5750 emit_stack_save (SAVE_BLOCK, &ret);
5751 return ret;
5755 /* Expand OpenACC acc_on_device.
5757 This has to happen late (that is, not in early folding; expand_builtin_*,
5758 rather than fold_builtin_*), as we have to act differently for host and
5759 acceleration device (ACCEL_COMPILER conditional). */
5761 static rtx
5762 expand_builtin_acc_on_device (tree exp, rtx target ATTRIBUTE_UNUSED)
5764 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5765 return NULL_RTX;
5767 tree arg, v1, v2, ret;
5768 location_t loc;
5770 arg = CALL_EXPR_ARG (exp, 0);
5771 arg = builtin_save_expr (arg);
5772 loc = EXPR_LOCATION (exp);
5774 /* Build: (arg == v1 || arg == v2) ? 1 : 0. */
5776 #ifdef ACCEL_COMPILER
5777 v1 = build_int_cst (TREE_TYPE (arg), /* TODO: acc_device_not_host */ 3);
5778 v2 = build_int_cst (TREE_TYPE (arg), ACCEL_COMPILER_acc_device);
5779 #else
5780 v1 = build_int_cst (TREE_TYPE (arg), /* TODO: acc_device_none */ 0);
5781 v2 = build_int_cst (TREE_TYPE (arg), /* TODO: acc_device_host */ 2);
5782 #endif
5784 v1 = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg, v1);
5785 v2 = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg, v2);
5787 /* Can't use TRUTH_ORIF_EXPR, as that is not supported by
5788 expand_expr_real*. */
5789 ret = fold_build3_loc (loc, COND_EXPR, integer_type_node, v1, v1, v2);
5790 ret = fold_build3_loc (loc, COND_EXPR, integer_type_node,
5791 ret, integer_one_node, integer_zero_node);
5793 return expand_normal (ret);
5797 /* Expand an expression EXP that calls a built-in function,
5798 with result going to TARGET if that's convenient
5799 (and in mode MODE if that's convenient).
5800 SUBTARGET may be used as the target for computing one of EXP's operands.
5801 IGNORE is nonzero if the value is to be ignored. */
5804 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5805 int ignore)
5807 tree fndecl = get_callee_fndecl (exp);
5808 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5809 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5810 int flags;
5812 /* When ASan is enabled, we don't want to expand some memory/string
5813 builtins and rely on libsanitizer's hooks. This allows us to avoid
5814 redundant checks and be sure, that possible overflow will be detected
5815 by ASan. */
5817 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5818 return expand_call (exp, target, ignore);
5820 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5821 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5823 /* When not optimizing, generate calls to library functions for a certain
5824 set of builtins. */
5825 if (!optimize
5826 && !called_as_built_in (fndecl)
5827 && fcode != BUILT_IN_FORK
5828 && fcode != BUILT_IN_EXECL
5829 && fcode != BUILT_IN_EXECV
5830 && fcode != BUILT_IN_EXECLP
5831 && fcode != BUILT_IN_EXECLE
5832 && fcode != BUILT_IN_EXECVP
5833 && fcode != BUILT_IN_EXECVE
5834 && fcode != BUILT_IN_ALLOCA
5835 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5836 && fcode != BUILT_IN_FREE)
5837 return expand_call (exp, target, ignore);
5839 /* The built-in function expanders test for target == const0_rtx
5840 to determine whether the function's result will be ignored. */
5841 if (ignore)
5842 target = const0_rtx;
5844 /* If the result of a pure or const built-in function is ignored, and
5845 none of its arguments are volatile, we can avoid expanding the
5846 built-in call and just evaluate the arguments for side-effects. */
5847 if (target == const0_rtx
5848 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5849 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5851 bool volatilep = false;
5852 tree arg;
5853 call_expr_arg_iterator iter;
5855 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5856 if (TREE_THIS_VOLATILE (arg))
5858 volatilep = true;
5859 break;
5862 if (! volatilep)
5864 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5865 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5866 return const0_rtx;
5870 switch (fcode)
5872 CASE_FLT_FN (BUILT_IN_FABS):
5873 case BUILT_IN_FABSD32:
5874 case BUILT_IN_FABSD64:
5875 case BUILT_IN_FABSD128:
5876 target = expand_builtin_fabs (exp, target, subtarget);
5877 if (target)
5878 return target;
5879 break;
5881 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5882 target = expand_builtin_copysign (exp, target, subtarget);
5883 if (target)
5884 return target;
5885 break;
5887 /* Just do a normal library call if we were unable to fold
5888 the values. */
5889 CASE_FLT_FN (BUILT_IN_CABS):
5890 break;
5892 CASE_FLT_FN (BUILT_IN_EXP):
5893 CASE_FLT_FN (BUILT_IN_EXP10):
5894 CASE_FLT_FN (BUILT_IN_POW10):
5895 CASE_FLT_FN (BUILT_IN_EXP2):
5896 CASE_FLT_FN (BUILT_IN_EXPM1):
5897 CASE_FLT_FN (BUILT_IN_LOGB):
5898 CASE_FLT_FN (BUILT_IN_LOG):
5899 CASE_FLT_FN (BUILT_IN_LOG10):
5900 CASE_FLT_FN (BUILT_IN_LOG2):
5901 CASE_FLT_FN (BUILT_IN_LOG1P):
5902 CASE_FLT_FN (BUILT_IN_TAN):
5903 CASE_FLT_FN (BUILT_IN_ASIN):
5904 CASE_FLT_FN (BUILT_IN_ACOS):
5905 CASE_FLT_FN (BUILT_IN_ATAN):
5906 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5907 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5908 because of possible accuracy problems. */
5909 if (! flag_unsafe_math_optimizations)
5910 break;
5911 CASE_FLT_FN (BUILT_IN_SQRT):
5912 CASE_FLT_FN (BUILT_IN_FLOOR):
5913 CASE_FLT_FN (BUILT_IN_CEIL):
5914 CASE_FLT_FN (BUILT_IN_TRUNC):
5915 CASE_FLT_FN (BUILT_IN_ROUND):
5916 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5917 CASE_FLT_FN (BUILT_IN_RINT):
5918 target = expand_builtin_mathfn (exp, target, subtarget);
5919 if (target)
5920 return target;
5921 break;
5923 CASE_FLT_FN (BUILT_IN_FMA):
5924 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5925 if (target)
5926 return target;
5927 break;
5929 CASE_FLT_FN (BUILT_IN_ILOGB):
5930 if (! flag_unsafe_math_optimizations)
5931 break;
5932 CASE_FLT_FN (BUILT_IN_ISINF):
5933 CASE_FLT_FN (BUILT_IN_FINITE):
5934 case BUILT_IN_ISFINITE:
5935 case BUILT_IN_ISNORMAL:
5936 target = expand_builtin_interclass_mathfn (exp, target);
5937 if (target)
5938 return target;
5939 break;
5941 CASE_FLT_FN (BUILT_IN_ICEIL):
5942 CASE_FLT_FN (BUILT_IN_LCEIL):
5943 CASE_FLT_FN (BUILT_IN_LLCEIL):
5944 CASE_FLT_FN (BUILT_IN_LFLOOR):
5945 CASE_FLT_FN (BUILT_IN_IFLOOR):
5946 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5947 target = expand_builtin_int_roundingfn (exp, target);
5948 if (target)
5949 return target;
5950 break;
5952 CASE_FLT_FN (BUILT_IN_IRINT):
5953 CASE_FLT_FN (BUILT_IN_LRINT):
5954 CASE_FLT_FN (BUILT_IN_LLRINT):
5955 CASE_FLT_FN (BUILT_IN_IROUND):
5956 CASE_FLT_FN (BUILT_IN_LROUND):
5957 CASE_FLT_FN (BUILT_IN_LLROUND):
5958 target = expand_builtin_int_roundingfn_2 (exp, target);
5959 if (target)
5960 return target;
5961 break;
5963 CASE_FLT_FN (BUILT_IN_POWI):
5964 target = expand_builtin_powi (exp, target);
5965 if (target)
5966 return target;
5967 break;
5969 CASE_FLT_FN (BUILT_IN_ATAN2):
5970 CASE_FLT_FN (BUILT_IN_LDEXP):
5971 CASE_FLT_FN (BUILT_IN_SCALB):
5972 CASE_FLT_FN (BUILT_IN_SCALBN):
5973 CASE_FLT_FN (BUILT_IN_SCALBLN):
5974 if (! flag_unsafe_math_optimizations)
5975 break;
5977 CASE_FLT_FN (BUILT_IN_FMOD):
5978 CASE_FLT_FN (BUILT_IN_REMAINDER):
5979 CASE_FLT_FN (BUILT_IN_DREM):
5980 CASE_FLT_FN (BUILT_IN_POW):
5981 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5982 if (target)
5983 return target;
5984 break;
5986 CASE_FLT_FN (BUILT_IN_CEXPI):
5987 target = expand_builtin_cexpi (exp, target);
5988 gcc_assert (target);
5989 return target;
5991 CASE_FLT_FN (BUILT_IN_SIN):
5992 CASE_FLT_FN (BUILT_IN_COS):
5993 if (! flag_unsafe_math_optimizations)
5994 break;
5995 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5996 if (target)
5997 return target;
5998 break;
6000 CASE_FLT_FN (BUILT_IN_SINCOS):
6001 if (! flag_unsafe_math_optimizations)
6002 break;
6003 target = expand_builtin_sincos (exp);
6004 if (target)
6005 return target;
6006 break;
6008 case BUILT_IN_APPLY_ARGS:
6009 return expand_builtin_apply_args ();
6011 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6012 FUNCTION with a copy of the parameters described by
6013 ARGUMENTS, and ARGSIZE. It returns a block of memory
6014 allocated on the stack into which is stored all the registers
6015 that might possibly be used for returning the result of a
6016 function. ARGUMENTS is the value returned by
6017 __builtin_apply_args. ARGSIZE is the number of bytes of
6018 arguments that must be copied. ??? How should this value be
6019 computed? We'll also need a safe worst case value for varargs
6020 functions. */
6021 case BUILT_IN_APPLY:
6022 if (!validate_arglist (exp, POINTER_TYPE,
6023 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6024 && !validate_arglist (exp, REFERENCE_TYPE,
6025 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6026 return const0_rtx;
6027 else
6029 rtx ops[3];
6031 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6032 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6033 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6035 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6038 /* __builtin_return (RESULT) causes the function to return the
6039 value described by RESULT. RESULT is address of the block of
6040 memory returned by __builtin_apply. */
6041 case BUILT_IN_RETURN:
6042 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6043 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6044 return const0_rtx;
6046 case BUILT_IN_SAVEREGS:
6047 return expand_builtin_saveregs ();
6049 case BUILT_IN_VA_ARG_PACK:
6050 /* All valid uses of __builtin_va_arg_pack () are removed during
6051 inlining. */
6052 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6053 return const0_rtx;
6055 case BUILT_IN_VA_ARG_PACK_LEN:
6056 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6057 inlining. */
6058 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6059 return const0_rtx;
6061 /* Return the address of the first anonymous stack arg. */
6062 case BUILT_IN_NEXT_ARG:
6063 if (fold_builtin_next_arg (exp, false))
6064 return const0_rtx;
6065 return expand_builtin_next_arg ();
6067 case BUILT_IN_CLEAR_CACHE:
6068 target = expand_builtin___clear_cache (exp);
6069 if (target)
6070 return target;
6071 break;
6073 case BUILT_IN_CLASSIFY_TYPE:
6074 return expand_builtin_classify_type (exp);
6076 case BUILT_IN_CONSTANT_P:
6077 return const0_rtx;
6079 case BUILT_IN_FRAME_ADDRESS:
6080 case BUILT_IN_RETURN_ADDRESS:
6081 return expand_builtin_frame_address (fndecl, exp);
6083 /* Returns the address of the area where the structure is returned.
6084 0 otherwise. */
6085 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6086 if (call_expr_nargs (exp) != 0
6087 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6088 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6089 return const0_rtx;
6090 else
6091 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6093 case BUILT_IN_ALLOCA:
6094 case BUILT_IN_ALLOCA_WITH_ALIGN:
6095 /* If the allocation stems from the declaration of a variable-sized
6096 object, it cannot accumulate. */
6097 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6098 if (target)
6099 return target;
6100 break;
6102 case BUILT_IN_STACK_SAVE:
6103 return expand_stack_save ();
6105 case BUILT_IN_STACK_RESTORE:
6106 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6107 return const0_rtx;
6109 case BUILT_IN_BSWAP16:
6110 case BUILT_IN_BSWAP32:
6111 case BUILT_IN_BSWAP64:
6112 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6113 if (target)
6114 return target;
6115 break;
6117 CASE_INT_FN (BUILT_IN_FFS):
6118 target = expand_builtin_unop (target_mode, exp, target,
6119 subtarget, ffs_optab);
6120 if (target)
6121 return target;
6122 break;
6124 CASE_INT_FN (BUILT_IN_CLZ):
6125 target = expand_builtin_unop (target_mode, exp, target,
6126 subtarget, clz_optab);
6127 if (target)
6128 return target;
6129 break;
6131 CASE_INT_FN (BUILT_IN_CTZ):
6132 target = expand_builtin_unop (target_mode, exp, target,
6133 subtarget, ctz_optab);
6134 if (target)
6135 return target;
6136 break;
6138 CASE_INT_FN (BUILT_IN_CLRSB):
6139 target = expand_builtin_unop (target_mode, exp, target,
6140 subtarget, clrsb_optab);
6141 if (target)
6142 return target;
6143 break;
6145 CASE_INT_FN (BUILT_IN_POPCOUNT):
6146 target = expand_builtin_unop (target_mode, exp, target,
6147 subtarget, popcount_optab);
6148 if (target)
6149 return target;
6150 break;
6152 CASE_INT_FN (BUILT_IN_PARITY):
6153 target = expand_builtin_unop (target_mode, exp, target,
6154 subtarget, parity_optab);
6155 if (target)
6156 return target;
6157 break;
6159 case BUILT_IN_STRLEN:
6160 target = expand_builtin_strlen (exp, target, target_mode);
6161 if (target)
6162 return target;
6163 break;
6165 case BUILT_IN_STRCPY:
6166 target = expand_builtin_strcpy (exp, target);
6167 if (target)
6168 return target;
6169 break;
6171 case BUILT_IN_STRNCPY:
6172 target = expand_builtin_strncpy (exp, target);
6173 if (target)
6174 return target;
6175 break;
6177 case BUILT_IN_STPCPY:
6178 target = expand_builtin_stpcpy (exp, target, mode);
6179 if (target)
6180 return target;
6181 break;
6183 case BUILT_IN_MEMCPY:
6184 target = expand_builtin_memcpy (exp, target);
6185 if (target)
6186 return target;
6187 break;
6189 case BUILT_IN_MEMPCPY:
6190 target = expand_builtin_mempcpy (exp, target, mode);
6191 if (target)
6192 return target;
6193 break;
6195 case BUILT_IN_MEMSET:
6196 target = expand_builtin_memset (exp, target, mode);
6197 if (target)
6198 return target;
6199 break;
6201 case BUILT_IN_BZERO:
6202 target = expand_builtin_bzero (exp);
6203 if (target)
6204 return target;
6205 break;
6207 case BUILT_IN_STRCMP:
6208 target = expand_builtin_strcmp (exp, target);
6209 if (target)
6210 return target;
6211 break;
6213 case BUILT_IN_STRNCMP:
6214 target = expand_builtin_strncmp (exp, target, mode);
6215 if (target)
6216 return target;
6217 break;
6219 case BUILT_IN_BCMP:
6220 case BUILT_IN_MEMCMP:
6221 target = expand_builtin_memcmp (exp, target, mode);
6222 if (target)
6223 return target;
6224 break;
6226 case BUILT_IN_SETJMP:
6227 /* This should have been lowered to the builtins below. */
6228 gcc_unreachable ();
6230 case BUILT_IN_SETJMP_SETUP:
6231 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6232 and the receiver label. */
6233 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6235 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6236 VOIDmode, EXPAND_NORMAL);
6237 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6238 rtx label_r = label_rtx (label);
6240 /* This is copied from the handling of non-local gotos. */
6241 expand_builtin_setjmp_setup (buf_addr, label_r);
6242 nonlocal_goto_handler_labels
6243 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6244 nonlocal_goto_handler_labels);
6245 /* ??? Do not let expand_label treat us as such since we would
6246 not want to be both on the list of non-local labels and on
6247 the list of forced labels. */
6248 FORCED_LABEL (label) = 0;
6249 return const0_rtx;
6251 break;
6253 case BUILT_IN_SETJMP_RECEIVER:
6254 /* __builtin_setjmp_receiver is passed the receiver label. */
6255 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6257 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6258 rtx label_r = label_rtx (label);
6260 expand_builtin_setjmp_receiver (label_r);
6261 return const0_rtx;
6263 break;
6265 /* __builtin_longjmp is passed a pointer to an array of five words.
6266 It's similar to the C library longjmp function but works with
6267 __builtin_setjmp above. */
6268 case BUILT_IN_LONGJMP:
6269 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6271 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6272 VOIDmode, EXPAND_NORMAL);
6273 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6275 if (value != const1_rtx)
6277 error ("%<__builtin_longjmp%> second argument must be 1");
6278 return const0_rtx;
6281 expand_builtin_longjmp (buf_addr, value);
6282 return const0_rtx;
6284 break;
6286 case BUILT_IN_NONLOCAL_GOTO:
6287 target = expand_builtin_nonlocal_goto (exp);
6288 if (target)
6289 return target;
6290 break;
6292 /* This updates the setjmp buffer that is its argument with the value
6293 of the current stack pointer. */
6294 case BUILT_IN_UPDATE_SETJMP_BUF:
6295 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6297 rtx buf_addr
6298 = expand_normal (CALL_EXPR_ARG (exp, 0));
6300 expand_builtin_update_setjmp_buf (buf_addr);
6301 return const0_rtx;
6303 break;
6305 case BUILT_IN_TRAP:
6306 expand_builtin_trap ();
6307 return const0_rtx;
6309 case BUILT_IN_UNREACHABLE:
6310 expand_builtin_unreachable ();
6311 return const0_rtx;
6313 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6314 case BUILT_IN_SIGNBITD32:
6315 case BUILT_IN_SIGNBITD64:
6316 case BUILT_IN_SIGNBITD128:
6317 target = expand_builtin_signbit (exp, target);
6318 if (target)
6319 return target;
6320 break;
6322 /* Various hooks for the DWARF 2 __throw routine. */
6323 case BUILT_IN_UNWIND_INIT:
6324 expand_builtin_unwind_init ();
6325 return const0_rtx;
6326 case BUILT_IN_DWARF_CFA:
6327 return virtual_cfa_rtx;
6328 #ifdef DWARF2_UNWIND_INFO
6329 case BUILT_IN_DWARF_SP_COLUMN:
6330 return expand_builtin_dwarf_sp_column ();
6331 case BUILT_IN_INIT_DWARF_REG_SIZES:
6332 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6333 return const0_rtx;
6334 #endif
6335 case BUILT_IN_FROB_RETURN_ADDR:
6336 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6337 case BUILT_IN_EXTRACT_RETURN_ADDR:
6338 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6339 case BUILT_IN_EH_RETURN:
6340 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6341 CALL_EXPR_ARG (exp, 1));
6342 return const0_rtx;
6343 #ifdef EH_RETURN_DATA_REGNO
6344 case BUILT_IN_EH_RETURN_DATA_REGNO:
6345 return expand_builtin_eh_return_data_regno (exp);
6346 #endif
6347 case BUILT_IN_EXTEND_POINTER:
6348 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6349 case BUILT_IN_EH_POINTER:
6350 return expand_builtin_eh_pointer (exp);
6351 case BUILT_IN_EH_FILTER:
6352 return expand_builtin_eh_filter (exp);
6353 case BUILT_IN_EH_COPY_VALUES:
6354 return expand_builtin_eh_copy_values (exp);
6356 case BUILT_IN_VA_START:
6357 return expand_builtin_va_start (exp);
6358 case BUILT_IN_VA_END:
6359 return expand_builtin_va_end (exp);
6360 case BUILT_IN_VA_COPY:
6361 return expand_builtin_va_copy (exp);
6362 case BUILT_IN_EXPECT:
6363 return expand_builtin_expect (exp, target);
6364 case BUILT_IN_ASSUME_ALIGNED:
6365 return expand_builtin_assume_aligned (exp, target);
6366 case BUILT_IN_PREFETCH:
6367 expand_builtin_prefetch (exp);
6368 return const0_rtx;
6370 case BUILT_IN_INIT_TRAMPOLINE:
6371 return expand_builtin_init_trampoline (exp, true);
6372 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6373 return expand_builtin_init_trampoline (exp, false);
6374 case BUILT_IN_ADJUST_TRAMPOLINE:
6375 return expand_builtin_adjust_trampoline (exp);
6377 case BUILT_IN_FORK:
6378 case BUILT_IN_EXECL:
6379 case BUILT_IN_EXECV:
6380 case BUILT_IN_EXECLP:
6381 case BUILT_IN_EXECLE:
6382 case BUILT_IN_EXECVP:
6383 case BUILT_IN_EXECVE:
6384 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6385 if (target)
6386 return target;
6387 break;
6389 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6390 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6391 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6392 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6393 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6394 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6395 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6396 if (target)
6397 return target;
6398 break;
6400 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6401 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6402 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6403 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6404 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6405 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6406 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6407 if (target)
6408 return target;
6409 break;
6411 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6412 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6413 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6414 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6415 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6416 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6417 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6418 if (target)
6419 return target;
6420 break;
6422 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6423 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6424 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6425 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6426 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6427 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6428 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6429 if (target)
6430 return target;
6431 break;
6433 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6434 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6435 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6436 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6437 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6438 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6439 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6440 if (target)
6441 return target;
6442 break;
6444 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6445 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6446 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6447 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6448 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6449 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6450 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6451 if (target)
6452 return target;
6453 break;
6455 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6456 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6457 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6458 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6459 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6460 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6461 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6462 if (target)
6463 return target;
6464 break;
6466 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6467 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6468 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6469 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6470 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6471 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6472 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6473 if (target)
6474 return target;
6475 break;
6477 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6478 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6479 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6480 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6481 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6482 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6483 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6484 if (target)
6485 return target;
6486 break;
6488 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6489 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6490 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6491 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6492 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6493 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6494 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6495 if (target)
6496 return target;
6497 break;
6499 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6500 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6501 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6502 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6503 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6504 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6505 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6506 if (target)
6507 return target;
6508 break;
6510 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6511 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6512 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6513 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6514 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6515 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6516 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6517 if (target)
6518 return target;
6519 break;
6521 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6522 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6523 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6524 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6525 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6526 if (mode == VOIDmode)
6527 mode = TYPE_MODE (boolean_type_node);
6528 if (!target || !register_operand (target, mode))
6529 target = gen_reg_rtx (mode);
6531 mode = get_builtin_sync_mode
6532 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6533 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6534 if (target)
6535 return target;
6536 break;
6538 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6539 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6540 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6541 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6542 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6543 mode = get_builtin_sync_mode
6544 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6545 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6546 if (target)
6547 return target;
6548 break;
6550 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6551 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6552 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6553 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6554 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6555 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6556 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6557 if (target)
6558 return target;
6559 break;
6561 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6562 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6563 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6564 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6565 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6566 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6567 expand_builtin_sync_lock_release (mode, exp);
6568 return const0_rtx;
6570 case BUILT_IN_SYNC_SYNCHRONIZE:
6571 expand_builtin_sync_synchronize ();
6572 return const0_rtx;
6574 case BUILT_IN_ATOMIC_EXCHANGE_1:
6575 case BUILT_IN_ATOMIC_EXCHANGE_2:
6576 case BUILT_IN_ATOMIC_EXCHANGE_4:
6577 case BUILT_IN_ATOMIC_EXCHANGE_8:
6578 case BUILT_IN_ATOMIC_EXCHANGE_16:
6579 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6580 target = expand_builtin_atomic_exchange (mode, exp, target);
6581 if (target)
6582 return target;
6583 break;
6585 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6586 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6587 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6588 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6589 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6591 unsigned int nargs, z;
6592 vec<tree, va_gc> *vec;
6594 mode =
6595 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6596 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6597 if (target)
6598 return target;
6600 /* If this is turned into an external library call, the weak parameter
6601 must be dropped to match the expected parameter list. */
6602 nargs = call_expr_nargs (exp);
6603 vec_alloc (vec, nargs - 1);
6604 for (z = 0; z < 3; z++)
6605 vec->quick_push (CALL_EXPR_ARG (exp, z));
6606 /* Skip the boolean weak parameter. */
6607 for (z = 4; z < 6; z++)
6608 vec->quick_push (CALL_EXPR_ARG (exp, z));
6609 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6610 break;
6613 case BUILT_IN_ATOMIC_LOAD_1:
6614 case BUILT_IN_ATOMIC_LOAD_2:
6615 case BUILT_IN_ATOMIC_LOAD_4:
6616 case BUILT_IN_ATOMIC_LOAD_8:
6617 case BUILT_IN_ATOMIC_LOAD_16:
6618 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6619 target = expand_builtin_atomic_load (mode, exp, target);
6620 if (target)
6621 return target;
6622 break;
6624 case BUILT_IN_ATOMIC_STORE_1:
6625 case BUILT_IN_ATOMIC_STORE_2:
6626 case BUILT_IN_ATOMIC_STORE_4:
6627 case BUILT_IN_ATOMIC_STORE_8:
6628 case BUILT_IN_ATOMIC_STORE_16:
6629 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6630 target = expand_builtin_atomic_store (mode, exp);
6631 if (target)
6632 return const0_rtx;
6633 break;
6635 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6636 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6637 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6638 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6639 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6641 enum built_in_function lib;
6642 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6643 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6644 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6645 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6646 ignore, lib);
6647 if (target)
6648 return target;
6649 break;
6651 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6652 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6653 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6654 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6655 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6657 enum built_in_function lib;
6658 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6659 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6660 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6661 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6662 ignore, lib);
6663 if (target)
6664 return target;
6665 break;
6667 case BUILT_IN_ATOMIC_AND_FETCH_1:
6668 case BUILT_IN_ATOMIC_AND_FETCH_2:
6669 case BUILT_IN_ATOMIC_AND_FETCH_4:
6670 case BUILT_IN_ATOMIC_AND_FETCH_8:
6671 case BUILT_IN_ATOMIC_AND_FETCH_16:
6673 enum built_in_function lib;
6674 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6675 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6676 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6677 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6678 ignore, lib);
6679 if (target)
6680 return target;
6681 break;
6683 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6684 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6685 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6686 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6687 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6689 enum built_in_function lib;
6690 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6691 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6692 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6693 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6694 ignore, lib);
6695 if (target)
6696 return target;
6697 break;
6699 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6700 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6701 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6702 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6703 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6705 enum built_in_function lib;
6706 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6707 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6708 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6709 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6710 ignore, lib);
6711 if (target)
6712 return target;
6713 break;
6715 case BUILT_IN_ATOMIC_OR_FETCH_1:
6716 case BUILT_IN_ATOMIC_OR_FETCH_2:
6717 case BUILT_IN_ATOMIC_OR_FETCH_4:
6718 case BUILT_IN_ATOMIC_OR_FETCH_8:
6719 case BUILT_IN_ATOMIC_OR_FETCH_16:
6721 enum built_in_function lib;
6722 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6723 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6724 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6725 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6726 ignore, lib);
6727 if (target)
6728 return target;
6729 break;
6731 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6732 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6733 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6734 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6735 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6736 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6737 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6738 ignore, BUILT_IN_NONE);
6739 if (target)
6740 return target;
6741 break;
6743 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6744 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6745 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6746 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6747 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6748 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6749 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6750 ignore, BUILT_IN_NONE);
6751 if (target)
6752 return target;
6753 break;
6755 case BUILT_IN_ATOMIC_FETCH_AND_1:
6756 case BUILT_IN_ATOMIC_FETCH_AND_2:
6757 case BUILT_IN_ATOMIC_FETCH_AND_4:
6758 case BUILT_IN_ATOMIC_FETCH_AND_8:
6759 case BUILT_IN_ATOMIC_FETCH_AND_16:
6760 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6761 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6762 ignore, BUILT_IN_NONE);
6763 if (target)
6764 return target;
6765 break;
6767 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6768 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6769 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6770 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6771 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6772 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6773 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6774 ignore, BUILT_IN_NONE);
6775 if (target)
6776 return target;
6777 break;
6779 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6780 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6781 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6782 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6783 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6784 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6785 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6786 ignore, BUILT_IN_NONE);
6787 if (target)
6788 return target;
6789 break;
6791 case BUILT_IN_ATOMIC_FETCH_OR_1:
6792 case BUILT_IN_ATOMIC_FETCH_OR_2:
6793 case BUILT_IN_ATOMIC_FETCH_OR_4:
6794 case BUILT_IN_ATOMIC_FETCH_OR_8:
6795 case BUILT_IN_ATOMIC_FETCH_OR_16:
6796 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6797 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6798 ignore, BUILT_IN_NONE);
6799 if (target)
6800 return target;
6801 break;
6803 case BUILT_IN_ATOMIC_TEST_AND_SET:
6804 return expand_builtin_atomic_test_and_set (exp, target);
6806 case BUILT_IN_ATOMIC_CLEAR:
6807 return expand_builtin_atomic_clear (exp);
6809 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6810 return expand_builtin_atomic_always_lock_free (exp);
6812 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6813 target = expand_builtin_atomic_is_lock_free (exp);
6814 if (target)
6815 return target;
6816 break;
6818 case BUILT_IN_ATOMIC_THREAD_FENCE:
6819 expand_builtin_atomic_thread_fence (exp);
6820 return const0_rtx;
6822 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6823 expand_builtin_atomic_signal_fence (exp);
6824 return const0_rtx;
6826 case BUILT_IN_OBJECT_SIZE:
6827 return expand_builtin_object_size (exp);
6829 case BUILT_IN_MEMCPY_CHK:
6830 case BUILT_IN_MEMPCPY_CHK:
6831 case BUILT_IN_MEMMOVE_CHK:
6832 case BUILT_IN_MEMSET_CHK:
6833 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6834 if (target)
6835 return target;
6836 break;
6838 case BUILT_IN_STRCPY_CHK:
6839 case BUILT_IN_STPCPY_CHK:
6840 case BUILT_IN_STRNCPY_CHK:
6841 case BUILT_IN_STPNCPY_CHK:
6842 case BUILT_IN_STRCAT_CHK:
6843 case BUILT_IN_STRNCAT_CHK:
6844 case BUILT_IN_SNPRINTF_CHK:
6845 case BUILT_IN_VSNPRINTF_CHK:
6846 maybe_emit_chk_warning (exp, fcode);
6847 break;
6849 case BUILT_IN_SPRINTF_CHK:
6850 case BUILT_IN_VSPRINTF_CHK:
6851 maybe_emit_sprintf_chk_warning (exp, fcode);
6852 break;
6854 case BUILT_IN_FREE:
6855 if (warn_free_nonheap_object)
6856 maybe_emit_free_warning (exp);
6857 break;
6859 case BUILT_IN_THREAD_POINTER:
6860 return expand_builtin_thread_pointer (exp, target);
6862 case BUILT_IN_SET_THREAD_POINTER:
6863 expand_builtin_set_thread_pointer (exp);
6864 return const0_rtx;
6866 case BUILT_IN_CILK_DETACH:
6867 expand_builtin_cilk_detach (exp);
6868 return const0_rtx;
6870 case BUILT_IN_CILK_POP_FRAME:
6871 expand_builtin_cilk_pop_frame (exp);
6872 return const0_rtx;
6874 case BUILT_IN_ACC_ON_DEVICE:
6875 target = expand_builtin_acc_on_device (exp, target);
6876 if (target)
6877 return target;
6878 break;
6880 default: /* just do library call, if unknown builtin */
6881 break;
6884 /* The switch statement above can drop through to cause the function
6885 to be called normally. */
6886 return expand_call (exp, target, ignore);
6889 /* Determine whether a tree node represents a call to a built-in
6890 function. If the tree T is a call to a built-in function with
6891 the right number of arguments of the appropriate types, return
6892 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6893 Otherwise the return value is END_BUILTINS. */
6895 enum built_in_function
6896 builtin_mathfn_code (const_tree t)
6898 const_tree fndecl, arg, parmlist;
6899 const_tree argtype, parmtype;
6900 const_call_expr_arg_iterator iter;
6902 if (TREE_CODE (t) != CALL_EXPR
6903 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6904 return END_BUILTINS;
6906 fndecl = get_callee_fndecl (t);
6907 if (fndecl == NULL_TREE
6908 || TREE_CODE (fndecl) != FUNCTION_DECL
6909 || ! DECL_BUILT_IN (fndecl)
6910 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6911 return END_BUILTINS;
6913 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6914 init_const_call_expr_arg_iterator (t, &iter);
6915 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6917 /* If a function doesn't take a variable number of arguments,
6918 the last element in the list will have type `void'. */
6919 parmtype = TREE_VALUE (parmlist);
6920 if (VOID_TYPE_P (parmtype))
6922 if (more_const_call_expr_args_p (&iter))
6923 return END_BUILTINS;
6924 return DECL_FUNCTION_CODE (fndecl);
6927 if (! more_const_call_expr_args_p (&iter))
6928 return END_BUILTINS;
6930 arg = next_const_call_expr_arg (&iter);
6931 argtype = TREE_TYPE (arg);
6933 if (SCALAR_FLOAT_TYPE_P (parmtype))
6935 if (! SCALAR_FLOAT_TYPE_P (argtype))
6936 return END_BUILTINS;
6938 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6940 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6941 return END_BUILTINS;
6943 else if (POINTER_TYPE_P (parmtype))
6945 if (! POINTER_TYPE_P (argtype))
6946 return END_BUILTINS;
6948 else if (INTEGRAL_TYPE_P (parmtype))
6950 if (! INTEGRAL_TYPE_P (argtype))
6951 return END_BUILTINS;
6953 else
6954 return END_BUILTINS;
6957 /* Variable-length argument list. */
6958 return DECL_FUNCTION_CODE (fndecl);
6961 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6962 evaluate to a constant. */
6964 static tree
6965 fold_builtin_constant_p (tree arg)
6967 /* We return 1 for a numeric type that's known to be a constant
6968 value at compile-time or for an aggregate type that's a
6969 literal constant. */
6970 STRIP_NOPS (arg);
6972 /* If we know this is a constant, emit the constant of one. */
6973 if (CONSTANT_CLASS_P (arg)
6974 || (TREE_CODE (arg) == CONSTRUCTOR
6975 && TREE_CONSTANT (arg)))
6976 return integer_one_node;
6977 if (TREE_CODE (arg) == ADDR_EXPR)
6979 tree op = TREE_OPERAND (arg, 0);
6980 if (TREE_CODE (op) == STRING_CST
6981 || (TREE_CODE (op) == ARRAY_REF
6982 && integer_zerop (TREE_OPERAND (op, 1))
6983 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6984 return integer_one_node;
6987 /* If this expression has side effects, show we don't know it to be a
6988 constant. Likewise if it's a pointer or aggregate type since in
6989 those case we only want literals, since those are only optimized
6990 when generating RTL, not later.
6991 And finally, if we are compiling an initializer, not code, we
6992 need to return a definite result now; there's not going to be any
6993 more optimization done. */
6994 if (TREE_SIDE_EFFECTS (arg)
6995 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6996 || POINTER_TYPE_P (TREE_TYPE (arg))
6997 || cfun == 0
6998 || folding_initializer
6999 || force_folding_builtin_constant_p)
7000 return integer_zero_node;
7002 return NULL_TREE;
7005 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7006 return it as a truthvalue. */
7008 static tree
7009 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7010 tree predictor)
7012 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7014 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7015 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7016 ret_type = TREE_TYPE (TREE_TYPE (fn));
7017 pred_type = TREE_VALUE (arg_types);
7018 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7020 pred = fold_convert_loc (loc, pred_type, pred);
7021 expected = fold_convert_loc (loc, expected_type, expected);
7022 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7023 predictor);
7025 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7026 build_int_cst (ret_type, 0));
7029 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7030 NULL_TREE if no simplification is possible. */
7032 tree
7033 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7035 tree inner, fndecl, inner_arg0;
7036 enum tree_code code;
7038 /* Distribute the expected value over short-circuiting operators.
7039 See through the cast from truthvalue_type_node to long. */
7040 inner_arg0 = arg0;
7041 while (TREE_CODE (inner_arg0) == NOP_EXPR
7042 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7043 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7044 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7046 /* If this is a builtin_expect within a builtin_expect keep the
7047 inner one. See through a comparison against a constant. It
7048 might have been added to create a thruthvalue. */
7049 inner = inner_arg0;
7051 if (COMPARISON_CLASS_P (inner)
7052 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7053 inner = TREE_OPERAND (inner, 0);
7055 if (TREE_CODE (inner) == CALL_EXPR
7056 && (fndecl = get_callee_fndecl (inner))
7057 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7058 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7059 return arg0;
7061 inner = inner_arg0;
7062 code = TREE_CODE (inner);
7063 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7065 tree op0 = TREE_OPERAND (inner, 0);
7066 tree op1 = TREE_OPERAND (inner, 1);
7068 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7069 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7070 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7072 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7075 /* If the argument isn't invariant then there's nothing else we can do. */
7076 if (!TREE_CONSTANT (inner_arg0))
7077 return NULL_TREE;
7079 /* If we expect that a comparison against the argument will fold to
7080 a constant return the constant. In practice, this means a true
7081 constant or the address of a non-weak symbol. */
7082 inner = inner_arg0;
7083 STRIP_NOPS (inner);
7084 if (TREE_CODE (inner) == ADDR_EXPR)
7088 inner = TREE_OPERAND (inner, 0);
7090 while (TREE_CODE (inner) == COMPONENT_REF
7091 || TREE_CODE (inner) == ARRAY_REF);
7092 if ((TREE_CODE (inner) == VAR_DECL
7093 || TREE_CODE (inner) == FUNCTION_DECL)
7094 && DECL_WEAK (inner))
7095 return NULL_TREE;
7098 /* Otherwise, ARG0 already has the proper type for the return value. */
7099 return arg0;
7102 /* Fold a call to __builtin_classify_type with argument ARG. */
7104 static tree
7105 fold_builtin_classify_type (tree arg)
7107 if (arg == 0)
7108 return build_int_cst (integer_type_node, no_type_class);
7110 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7113 /* Fold a call to __builtin_strlen with argument ARG. */
7115 static tree
7116 fold_builtin_strlen (location_t loc, tree type, tree arg)
7118 if (!validate_arg (arg, POINTER_TYPE))
7119 return NULL_TREE;
7120 else
7122 tree len = c_strlen (arg, 0);
7124 if (len)
7125 return fold_convert_loc (loc, type, len);
7127 return NULL_TREE;
7131 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7133 static tree
7134 fold_builtin_inf (location_t loc, tree type, int warn)
7136 REAL_VALUE_TYPE real;
7138 /* __builtin_inff is intended to be usable to define INFINITY on all
7139 targets. If an infinity is not available, INFINITY expands "to a
7140 positive constant of type float that overflows at translation
7141 time", footnote "In this case, using INFINITY will violate the
7142 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7143 Thus we pedwarn to ensure this constraint violation is
7144 diagnosed. */
7145 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7146 pedwarn (loc, 0, "target format does not support infinity");
7148 real_inf (&real);
7149 return build_real (type, real);
7152 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7154 static tree
7155 fold_builtin_nan (tree arg, tree type, int quiet)
7157 REAL_VALUE_TYPE real;
7158 const char *str;
7160 if (!validate_arg (arg, POINTER_TYPE))
7161 return NULL_TREE;
7162 str = c_getstr (arg);
7163 if (!str)
7164 return NULL_TREE;
7166 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7167 return NULL_TREE;
7169 return build_real (type, real);
7172 /* Return true if the floating point expression T has an integer value.
7173 We also allow +Inf, -Inf and NaN to be considered integer values. */
7175 static bool
7176 integer_valued_real_p (tree t)
7178 switch (TREE_CODE (t))
7180 case FLOAT_EXPR:
7181 return true;
7183 case ABS_EXPR:
7184 case SAVE_EXPR:
7185 return integer_valued_real_p (TREE_OPERAND (t, 0));
7187 case COMPOUND_EXPR:
7188 case MODIFY_EXPR:
7189 case BIND_EXPR:
7190 return integer_valued_real_p (TREE_OPERAND (t, 1));
7192 case PLUS_EXPR:
7193 case MINUS_EXPR:
7194 case MULT_EXPR:
7195 case MIN_EXPR:
7196 case MAX_EXPR:
7197 return integer_valued_real_p (TREE_OPERAND (t, 0))
7198 && integer_valued_real_p (TREE_OPERAND (t, 1));
7200 case COND_EXPR:
7201 return integer_valued_real_p (TREE_OPERAND (t, 1))
7202 && integer_valued_real_p (TREE_OPERAND (t, 2));
7204 case REAL_CST:
7205 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7207 case NOP_EXPR:
7209 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7210 if (TREE_CODE (type) == INTEGER_TYPE)
7211 return true;
7212 if (TREE_CODE (type) == REAL_TYPE)
7213 return integer_valued_real_p (TREE_OPERAND (t, 0));
7214 break;
7217 case CALL_EXPR:
7218 switch (builtin_mathfn_code (t))
7220 CASE_FLT_FN (BUILT_IN_CEIL):
7221 CASE_FLT_FN (BUILT_IN_FLOOR):
7222 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7223 CASE_FLT_FN (BUILT_IN_RINT):
7224 CASE_FLT_FN (BUILT_IN_ROUND):
7225 CASE_FLT_FN (BUILT_IN_TRUNC):
7226 return true;
7228 CASE_FLT_FN (BUILT_IN_FMIN):
7229 CASE_FLT_FN (BUILT_IN_FMAX):
7230 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7231 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7233 default:
7234 break;
7236 break;
7238 default:
7239 break;
7241 return false;
7244 /* FNDECL is assumed to be a builtin where truncation can be propagated
7245 across (for instance floor((double)f) == (double)floorf (f).
7246 Do the transformation for a call with argument ARG. */
7248 static tree
7249 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7251 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7253 if (!validate_arg (arg, REAL_TYPE))
7254 return NULL_TREE;
7256 /* Integer rounding functions are idempotent. */
7257 if (fcode == builtin_mathfn_code (arg))
7258 return arg;
7260 /* If argument is already integer valued, and we don't need to worry
7261 about setting errno, there's no need to perform rounding. */
7262 if (! flag_errno_math && integer_valued_real_p (arg))
7263 return arg;
7265 if (optimize)
7267 tree arg0 = strip_float_extensions (arg);
7268 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7269 tree newtype = TREE_TYPE (arg0);
7270 tree decl;
7272 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7273 && (decl = mathfn_built_in (newtype, fcode)))
7274 return fold_convert_loc (loc, ftype,
7275 build_call_expr_loc (loc, decl, 1,
7276 fold_convert_loc (loc,
7277 newtype,
7278 arg0)));
7280 return NULL_TREE;
7283 /* FNDECL is assumed to be builtin which can narrow the FP type of
7284 the argument, for instance lround((double)f) -> lroundf (f).
7285 Do the transformation for a call with argument ARG. */
7287 static tree
7288 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7290 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7292 if (!validate_arg (arg, REAL_TYPE))
7293 return NULL_TREE;
7295 /* If argument is already integer valued, and we don't need to worry
7296 about setting errno, there's no need to perform rounding. */
7297 if (! flag_errno_math && integer_valued_real_p (arg))
7298 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7299 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7301 if (optimize)
7303 tree ftype = TREE_TYPE (arg);
7304 tree arg0 = strip_float_extensions (arg);
7305 tree newtype = TREE_TYPE (arg0);
7306 tree decl;
7308 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7309 && (decl = mathfn_built_in (newtype, fcode)))
7310 return build_call_expr_loc (loc, decl, 1,
7311 fold_convert_loc (loc, newtype, arg0));
7314 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7315 sizeof (int) == sizeof (long). */
7316 if (TYPE_PRECISION (integer_type_node)
7317 == TYPE_PRECISION (long_integer_type_node))
7319 tree newfn = NULL_TREE;
7320 switch (fcode)
7322 CASE_FLT_FN (BUILT_IN_ICEIL):
7323 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7324 break;
7326 CASE_FLT_FN (BUILT_IN_IFLOOR):
7327 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7328 break;
7330 CASE_FLT_FN (BUILT_IN_IROUND):
7331 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7332 break;
7334 CASE_FLT_FN (BUILT_IN_IRINT):
7335 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7336 break;
7338 default:
7339 break;
7342 if (newfn)
7344 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7345 return fold_convert_loc (loc,
7346 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7350 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7351 sizeof (long long) == sizeof (long). */
7352 if (TYPE_PRECISION (long_long_integer_type_node)
7353 == TYPE_PRECISION (long_integer_type_node))
7355 tree newfn = NULL_TREE;
7356 switch (fcode)
7358 CASE_FLT_FN (BUILT_IN_LLCEIL):
7359 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7360 break;
7362 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7363 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7364 break;
7366 CASE_FLT_FN (BUILT_IN_LLROUND):
7367 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7368 break;
7370 CASE_FLT_FN (BUILT_IN_LLRINT):
7371 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7372 break;
7374 default:
7375 break;
7378 if (newfn)
7380 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7381 return fold_convert_loc (loc,
7382 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7386 return NULL_TREE;
7389 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7390 return type. Return NULL_TREE if no simplification can be made. */
7392 static tree
7393 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7395 tree res;
7397 if (!validate_arg (arg, COMPLEX_TYPE)
7398 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7399 return NULL_TREE;
7401 /* Calculate the result when the argument is a constant. */
7402 if (TREE_CODE (arg) == COMPLEX_CST
7403 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7404 type, mpfr_hypot)))
7405 return res;
7407 if (TREE_CODE (arg) == COMPLEX_EXPR)
7409 tree real = TREE_OPERAND (arg, 0);
7410 tree imag = TREE_OPERAND (arg, 1);
7412 /* If either part is zero, cabs is fabs of the other. */
7413 if (real_zerop (real))
7414 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7415 if (real_zerop (imag))
7416 return fold_build1_loc (loc, ABS_EXPR, type, real);
7418 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7419 if (flag_unsafe_math_optimizations
7420 && operand_equal_p (real, imag, OEP_PURE_SAME))
7422 const REAL_VALUE_TYPE sqrt2_trunc
7423 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7424 STRIP_NOPS (real);
7425 return fold_build2_loc (loc, MULT_EXPR, type,
7426 fold_build1_loc (loc, ABS_EXPR, type, real),
7427 build_real (type, sqrt2_trunc));
7431 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7432 if (TREE_CODE (arg) == NEGATE_EXPR
7433 || TREE_CODE (arg) == CONJ_EXPR)
7434 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7436 /* Don't do this when optimizing for size. */
7437 if (flag_unsafe_math_optimizations
7438 && optimize && optimize_function_for_speed_p (cfun))
7440 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7442 if (sqrtfn != NULL_TREE)
7444 tree rpart, ipart, result;
7446 arg = builtin_save_expr (arg);
7448 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7449 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7451 rpart = builtin_save_expr (rpart);
7452 ipart = builtin_save_expr (ipart);
7454 result = fold_build2_loc (loc, PLUS_EXPR, type,
7455 fold_build2_loc (loc, MULT_EXPR, type,
7456 rpart, rpart),
7457 fold_build2_loc (loc, MULT_EXPR, type,
7458 ipart, ipart));
7460 return build_call_expr_loc (loc, sqrtfn, 1, result);
7464 return NULL_TREE;
7467 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7468 complex tree type of the result. If NEG is true, the imaginary
7469 zero is negative. */
7471 static tree
7472 build_complex_cproj (tree type, bool neg)
7474 REAL_VALUE_TYPE rinf, rzero = dconst0;
7476 real_inf (&rinf);
7477 rzero.sign = neg;
7478 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7479 build_real (TREE_TYPE (type), rzero));
7482 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7483 return type. Return NULL_TREE if no simplification can be made. */
7485 static tree
7486 fold_builtin_cproj (location_t loc, tree arg, tree type)
7488 if (!validate_arg (arg, COMPLEX_TYPE)
7489 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7490 return NULL_TREE;
7492 /* If there are no infinities, return arg. */
7493 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7494 return non_lvalue_loc (loc, arg);
7496 /* Calculate the result when the argument is a constant. */
7497 if (TREE_CODE (arg) == COMPLEX_CST)
7499 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7500 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7502 if (real_isinf (real) || real_isinf (imag))
7503 return build_complex_cproj (type, imag->sign);
7504 else
7505 return arg;
7507 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7509 tree real = TREE_OPERAND (arg, 0);
7510 tree imag = TREE_OPERAND (arg, 1);
7512 STRIP_NOPS (real);
7513 STRIP_NOPS (imag);
7515 /* If the real part is inf and the imag part is known to be
7516 nonnegative, return (inf + 0i). Remember side-effects are
7517 possible in the imag part. */
7518 if (TREE_CODE (real) == REAL_CST
7519 && real_isinf (TREE_REAL_CST_PTR (real))
7520 && tree_expr_nonnegative_p (imag))
7521 return omit_one_operand_loc (loc, type,
7522 build_complex_cproj (type, false),
7523 arg);
7525 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7526 Remember side-effects are possible in the real part. */
7527 if (TREE_CODE (imag) == REAL_CST
7528 && real_isinf (TREE_REAL_CST_PTR (imag)))
7529 return
7530 omit_one_operand_loc (loc, type,
7531 build_complex_cproj (type, TREE_REAL_CST_PTR
7532 (imag)->sign), arg);
7535 return NULL_TREE;
7538 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7539 Return NULL_TREE if no simplification can be made. */
7541 static tree
7542 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7545 enum built_in_function fcode;
7546 tree res;
7548 if (!validate_arg (arg, REAL_TYPE))
7549 return NULL_TREE;
7551 /* Calculate the result when the argument is a constant. */
7552 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7553 return res;
7555 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7556 fcode = builtin_mathfn_code (arg);
7557 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7559 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7560 arg = fold_build2_loc (loc, MULT_EXPR, type,
7561 CALL_EXPR_ARG (arg, 0),
7562 build_real (type, dconsthalf));
7563 return build_call_expr_loc (loc, expfn, 1, arg);
7566 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7567 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7569 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7571 if (powfn)
7573 tree arg0 = CALL_EXPR_ARG (arg, 0);
7574 tree tree_root;
7575 /* The inner root was either sqrt or cbrt. */
7576 /* This was a conditional expression but it triggered a bug
7577 in Sun C 5.5. */
7578 REAL_VALUE_TYPE dconstroot;
7579 if (BUILTIN_SQRT_P (fcode))
7580 dconstroot = dconsthalf;
7581 else
7582 dconstroot = dconst_third ();
7584 /* Adjust for the outer root. */
7585 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7586 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7587 tree_root = build_real (type, dconstroot);
7588 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7592 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7593 if (flag_unsafe_math_optimizations
7594 && (fcode == BUILT_IN_POW
7595 || fcode == BUILT_IN_POWF
7596 || fcode == BUILT_IN_POWL))
7598 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7599 tree arg0 = CALL_EXPR_ARG (arg, 0);
7600 tree arg1 = CALL_EXPR_ARG (arg, 1);
7601 tree narg1;
7602 if (!tree_expr_nonnegative_p (arg0))
7603 arg0 = build1 (ABS_EXPR, type, arg0);
7604 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7605 build_real (type, dconsthalf));
7606 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7609 return NULL_TREE;
7612 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7613 Return NULL_TREE if no simplification can be made. */
7615 static tree
7616 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7618 const enum built_in_function fcode = builtin_mathfn_code (arg);
7619 tree res;
7621 if (!validate_arg (arg, REAL_TYPE))
7622 return NULL_TREE;
7624 /* Calculate the result when the argument is a constant. */
7625 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7626 return res;
7628 if (flag_unsafe_math_optimizations)
7630 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7631 if (BUILTIN_EXPONENT_P (fcode))
7633 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7634 const REAL_VALUE_TYPE third_trunc =
7635 real_value_truncate (TYPE_MODE (type), dconst_third ());
7636 arg = fold_build2_loc (loc, MULT_EXPR, type,
7637 CALL_EXPR_ARG (arg, 0),
7638 build_real (type, third_trunc));
7639 return build_call_expr_loc (loc, expfn, 1, arg);
7642 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7643 if (BUILTIN_SQRT_P (fcode))
7645 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7647 if (powfn)
7649 tree arg0 = CALL_EXPR_ARG (arg, 0);
7650 tree tree_root;
7651 REAL_VALUE_TYPE dconstroot = dconst_third ();
7653 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7654 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7655 tree_root = build_real (type, dconstroot);
7656 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7660 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7661 if (BUILTIN_CBRT_P (fcode))
7663 tree arg0 = CALL_EXPR_ARG (arg, 0);
7664 if (tree_expr_nonnegative_p (arg0))
7666 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7668 if (powfn)
7670 tree tree_root;
7671 REAL_VALUE_TYPE dconstroot;
7673 real_arithmetic (&dconstroot, MULT_EXPR,
7674 dconst_third_ptr (), dconst_third_ptr ());
7675 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7676 tree_root = build_real (type, dconstroot);
7677 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7682 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7683 if (fcode == BUILT_IN_POW
7684 || fcode == BUILT_IN_POWF
7685 || fcode == BUILT_IN_POWL)
7687 tree arg00 = CALL_EXPR_ARG (arg, 0);
7688 tree arg01 = CALL_EXPR_ARG (arg, 1);
7689 if (tree_expr_nonnegative_p (arg00))
7691 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7692 const REAL_VALUE_TYPE dconstroot
7693 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7694 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7695 build_real (type, dconstroot));
7696 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7700 return NULL_TREE;
7703 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7704 TYPE is the type of the return value. Return NULL_TREE if no
7705 simplification can be made. */
7707 static tree
7708 fold_builtin_cos (location_t loc,
7709 tree arg, tree type, tree fndecl)
7711 tree res, narg;
7713 if (!validate_arg (arg, REAL_TYPE))
7714 return NULL_TREE;
7716 /* Calculate the result when the argument is a constant. */
7717 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7718 return res;
7720 /* Optimize cos(-x) into cos (x). */
7721 if ((narg = fold_strip_sign_ops (arg)))
7722 return build_call_expr_loc (loc, fndecl, 1, narg);
7724 return NULL_TREE;
7727 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7728 Return NULL_TREE if no simplification can be made. */
7730 static tree
7731 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7733 if (validate_arg (arg, REAL_TYPE))
7735 tree res, narg;
7737 /* Calculate the result when the argument is a constant. */
7738 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7739 return res;
7741 /* Optimize cosh(-x) into cosh (x). */
7742 if ((narg = fold_strip_sign_ops (arg)))
7743 return build_call_expr_loc (loc, fndecl, 1, narg);
7746 return NULL_TREE;
7749 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7750 argument ARG. TYPE is the type of the return value. Return
7751 NULL_TREE if no simplification can be made. */
7753 static tree
7754 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7755 bool hyper)
7757 if (validate_arg (arg, COMPLEX_TYPE)
7758 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7760 tree tmp;
7762 /* Calculate the result when the argument is a constant. */
7763 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7764 return tmp;
7766 /* Optimize fn(-x) into fn(x). */
7767 if ((tmp = fold_strip_sign_ops (arg)))
7768 return build_call_expr_loc (loc, fndecl, 1, tmp);
7771 return NULL_TREE;
7774 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7775 Return NULL_TREE if no simplification can be made. */
7777 static tree
7778 fold_builtin_tan (tree arg, tree type)
7780 enum built_in_function fcode;
7781 tree res;
7783 if (!validate_arg (arg, REAL_TYPE))
7784 return NULL_TREE;
7786 /* Calculate the result when the argument is a constant. */
7787 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7788 return res;
7790 /* Optimize tan(atan(x)) = x. */
7791 fcode = builtin_mathfn_code (arg);
7792 if (flag_unsafe_math_optimizations
7793 && (fcode == BUILT_IN_ATAN
7794 || fcode == BUILT_IN_ATANF
7795 || fcode == BUILT_IN_ATANL))
7796 return CALL_EXPR_ARG (arg, 0);
7798 return NULL_TREE;
7801 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7802 NULL_TREE if no simplification can be made. */
7804 static tree
7805 fold_builtin_sincos (location_t loc,
7806 tree arg0, tree arg1, tree arg2)
7808 tree type;
7809 tree res, fn, call;
7811 if (!validate_arg (arg0, REAL_TYPE)
7812 || !validate_arg (arg1, POINTER_TYPE)
7813 || !validate_arg (arg2, POINTER_TYPE))
7814 return NULL_TREE;
7816 type = TREE_TYPE (arg0);
7818 /* Calculate the result when the argument is a constant. */
7819 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7820 return res;
7822 /* Canonicalize sincos to cexpi. */
7823 if (!targetm.libc_has_function (function_c99_math_complex))
7824 return NULL_TREE;
7825 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7826 if (!fn)
7827 return NULL_TREE;
7829 call = build_call_expr_loc (loc, fn, 1, arg0);
7830 call = builtin_save_expr (call);
7832 return build2 (COMPOUND_EXPR, void_type_node,
7833 build2 (MODIFY_EXPR, void_type_node,
7834 build_fold_indirect_ref_loc (loc, arg1),
7835 build1 (IMAGPART_EXPR, type, call)),
7836 build2 (MODIFY_EXPR, void_type_node,
7837 build_fold_indirect_ref_loc (loc, arg2),
7838 build1 (REALPART_EXPR, type, call)));
7841 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7842 NULL_TREE if no simplification can be made. */
7844 static tree
7845 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7847 tree rtype;
7848 tree realp, imagp, ifn;
7849 tree res;
7851 if (!validate_arg (arg0, COMPLEX_TYPE)
7852 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7853 return NULL_TREE;
7855 /* Calculate the result when the argument is a constant. */
7856 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7857 return res;
7859 rtype = TREE_TYPE (TREE_TYPE (arg0));
7861 /* In case we can figure out the real part of arg0 and it is constant zero
7862 fold to cexpi. */
7863 if (!targetm.libc_has_function (function_c99_math_complex))
7864 return NULL_TREE;
7865 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7866 if (!ifn)
7867 return NULL_TREE;
7869 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7870 && real_zerop (realp))
7872 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7873 return build_call_expr_loc (loc, ifn, 1, narg);
7876 /* In case we can easily decompose real and imaginary parts split cexp
7877 to exp (r) * cexpi (i). */
7878 if (flag_unsafe_math_optimizations
7879 && realp)
7881 tree rfn, rcall, icall;
7883 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7884 if (!rfn)
7885 return NULL_TREE;
7887 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7888 if (!imagp)
7889 return NULL_TREE;
7891 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7892 icall = builtin_save_expr (icall);
7893 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7894 rcall = builtin_save_expr (rcall);
7895 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7896 fold_build2_loc (loc, MULT_EXPR, rtype,
7897 rcall,
7898 fold_build1_loc (loc, REALPART_EXPR,
7899 rtype, icall)),
7900 fold_build2_loc (loc, MULT_EXPR, rtype,
7901 rcall,
7902 fold_build1_loc (loc, IMAGPART_EXPR,
7903 rtype, icall)));
7906 return NULL_TREE;
7909 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7910 Return NULL_TREE if no simplification can be made. */
7912 static tree
7913 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7915 if (!validate_arg (arg, REAL_TYPE))
7916 return NULL_TREE;
7918 /* Optimize trunc of constant value. */
7919 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7921 REAL_VALUE_TYPE r, x;
7922 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7924 x = TREE_REAL_CST (arg);
7925 real_trunc (&r, TYPE_MODE (type), &x);
7926 return build_real (type, r);
7929 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7932 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7933 Return NULL_TREE if no simplification can be made. */
7935 static tree
7936 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7938 if (!validate_arg (arg, REAL_TYPE))
7939 return NULL_TREE;
7941 /* Optimize floor of constant value. */
7942 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7944 REAL_VALUE_TYPE x;
7946 x = TREE_REAL_CST (arg);
7947 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7949 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7950 REAL_VALUE_TYPE r;
7952 real_floor (&r, TYPE_MODE (type), &x);
7953 return build_real (type, r);
7957 /* Fold floor (x) where x is nonnegative to trunc (x). */
7958 if (tree_expr_nonnegative_p (arg))
7960 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7961 if (truncfn)
7962 return build_call_expr_loc (loc, truncfn, 1, arg);
7965 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7968 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7969 Return NULL_TREE if no simplification can be made. */
7971 static tree
7972 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7974 if (!validate_arg (arg, REAL_TYPE))
7975 return NULL_TREE;
7977 /* Optimize ceil of constant value. */
7978 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7980 REAL_VALUE_TYPE x;
7982 x = TREE_REAL_CST (arg);
7983 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7985 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7986 REAL_VALUE_TYPE r;
7988 real_ceil (&r, TYPE_MODE (type), &x);
7989 return build_real (type, r);
7993 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7996 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7997 Return NULL_TREE if no simplification can be made. */
7999 static tree
8000 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8002 if (!validate_arg (arg, REAL_TYPE))
8003 return NULL_TREE;
8005 /* Optimize round of constant value. */
8006 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8008 REAL_VALUE_TYPE x;
8010 x = TREE_REAL_CST (arg);
8011 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8013 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8014 REAL_VALUE_TYPE r;
8016 real_round (&r, TYPE_MODE (type), &x);
8017 return build_real (type, r);
8021 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8024 /* Fold function call to builtin lround, lroundf or lroundl (or the
8025 corresponding long long versions) and other rounding functions. ARG
8026 is the argument to the call. Return NULL_TREE if no simplification
8027 can be made. */
8029 static tree
8030 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8032 if (!validate_arg (arg, REAL_TYPE))
8033 return NULL_TREE;
8035 /* Optimize lround of constant value. */
8036 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8038 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8040 if (real_isfinite (&x))
8042 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8043 tree ftype = TREE_TYPE (arg);
8044 REAL_VALUE_TYPE r;
8045 bool fail = false;
8047 switch (DECL_FUNCTION_CODE (fndecl))
8049 CASE_FLT_FN (BUILT_IN_IFLOOR):
8050 CASE_FLT_FN (BUILT_IN_LFLOOR):
8051 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8052 real_floor (&r, TYPE_MODE (ftype), &x);
8053 break;
8055 CASE_FLT_FN (BUILT_IN_ICEIL):
8056 CASE_FLT_FN (BUILT_IN_LCEIL):
8057 CASE_FLT_FN (BUILT_IN_LLCEIL):
8058 real_ceil (&r, TYPE_MODE (ftype), &x);
8059 break;
8061 CASE_FLT_FN (BUILT_IN_IROUND):
8062 CASE_FLT_FN (BUILT_IN_LROUND):
8063 CASE_FLT_FN (BUILT_IN_LLROUND):
8064 real_round (&r, TYPE_MODE (ftype), &x);
8065 break;
8067 default:
8068 gcc_unreachable ();
8071 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8072 if (!fail)
8073 return wide_int_to_tree (itype, val);
8077 switch (DECL_FUNCTION_CODE (fndecl))
8079 CASE_FLT_FN (BUILT_IN_LFLOOR):
8080 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8081 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8082 if (tree_expr_nonnegative_p (arg))
8083 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8084 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8085 break;
8086 default:;
8089 return fold_fixed_mathfn (loc, fndecl, arg);
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call; FNDECL identifies which builtin is being
   folded.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (arg);
      int result;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  result = wi::ffs (arg);
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  /* For a zero argument, the target macro stores its defined
	     clz(0) value into RESULT when it returns nonzero; otherwise
	     fall back to the type precision.  */
	  if (wi::ne_p (arg, 0))
	    result = wi::clz (arg);
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  /* Likewise for ctz(0) via CTZ_DEFINED_VALUE_AT_ZERO.  */
	  if (wi::ne_p (arg, 0))
	    result = wi::ctz (arg);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CLRSB):
	  result = wi::clrsb (arg);
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  result = wi::popcount (arg);
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  result = wi::parity (arg);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* These builtins all return int, i.e. the type of the return
	 type of FNDECL, regardless of the argument type.  */
      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
8151 /* Fold function call to builtin_bswap and the short, long and long long
8152 variants. Return NULL_TREE if no simplification can be made. */
8153 static tree
8154 fold_builtin_bswap (tree fndecl, tree arg)
8156 if (! validate_arg (arg, INTEGER_TYPE))
8157 return NULL_TREE;
8159 /* Optimize constant value. */
8160 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8162 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8164 switch (DECL_FUNCTION_CODE (fndecl))
8166 case BUILT_IN_BSWAP16:
8167 case BUILT_IN_BSWAP32:
8168 case BUILT_IN_BSWAP64:
8170 signop sgn = TYPE_SIGN (type);
8171 tree result =
8172 wide_int_to_tree (type,
8173 wide_int::from (arg, TYPE_PRECISION (type),
8174 sgn).bswap ());
8175 return result;
8177 default:
8178 gcc_unreachable ();
8182 return NULL_TREE;
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR logarithm function; it both drives
   constant folding and identifies which logN is being folded.  */

static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent)) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, SIGNED);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
8278 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8279 NULL_TREE if no simplification can be made. */
8281 static tree
8282 fold_builtin_hypot (location_t loc, tree fndecl,
8283 tree arg0, tree arg1, tree type)
8285 tree res, narg0, narg1;
8287 if (!validate_arg (arg0, REAL_TYPE)
8288 || !validate_arg (arg1, REAL_TYPE))
8289 return NULL_TREE;
8291 /* Calculate the result when the argument is a constant. */
8292 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8293 return res;
8295 /* If either argument to hypot has a negate or abs, strip that off.
8296 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8297 narg0 = fold_strip_sign_ops (arg0);
8298 narg1 = fold_strip_sign_ops (arg1);
8299 if (narg0 || narg1)
8301 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8302 narg1 ? narg1 : arg1);
8305 /* If either argument is zero, hypot is fabs of the other. */
8306 if (real_zerop (arg0))
8307 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8308 else if (real_zerop (arg1))
8309 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8311 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8312 if (flag_unsafe_math_optimizations
8313 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8315 const REAL_VALUE_TYPE sqrt2_trunc
8316 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8317 return fold_build2_loc (loc, MULT_EXPR, type,
8318 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8319 build_real (type, sqrt2_trunc));
8322 return NULL_TREE;
/* Fold a builtin function call to pow, powf, or powl with arguments
   ARG0 (base) and ARG1 (exponent); TYPE is the call's result type.
   Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0; Y is retained for side effects.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x), only under unsafe math (the
	 two differ for negative and NaN inputs).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent (round-trip through the real
	 value and compare).  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, SIGNED);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception (pow(0, negative) may trap or set
	     errno).  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
8478 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8479 Return NULL_TREE if no simplification can be made. */
8480 static tree
8481 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8482 tree arg0, tree arg1, tree type)
8484 if (!validate_arg (arg0, REAL_TYPE)
8485 || !validate_arg (arg1, INTEGER_TYPE))
8486 return NULL_TREE;
8488 /* Optimize pow(1.0,y) = 1.0. */
8489 if (real_onep (arg0))
8490 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8492 if (tree_fits_shwi_p (arg1))
8494 HOST_WIDE_INT c = tree_to_shwi (arg1);
8496 /* Evaluate powi at compile-time. */
8497 if (TREE_CODE (arg0) == REAL_CST
8498 && !TREE_OVERFLOW (arg0))
8500 REAL_VALUE_TYPE x;
8501 x = TREE_REAL_CST (arg0);
8502 real_powi (&x, TYPE_MODE (type), &x, c);
8503 return build_real (type, x);
8506 /* Optimize pow(x,0) = 1.0. */
8507 if (c == 0)
8508 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8509 arg0);
8511 /* Optimize pow(x,1) = x. */
8512 if (c == 1)
8513 return arg0;
8515 /* Optimize pow(x,-1) = 1.0/x. */
8516 if (c == -1)
8517 return fold_build2_loc (loc, RDIV_EXPR, type,
8518 build_real (type, dconst1), arg0);
8521 return NULL_TREE;
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function; it both drives
   constant folding and identifies which expN is being folded.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  Only valid under unsafe math,
	 since it ignores rounding and domain errors of the inner
	 logN call.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
8565 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8566 Return NULL_TREE if no simplification can be made. */
8568 static tree
8569 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8571 tree fn, len, lenp1, call, type;
8573 if (!validate_arg (dest, POINTER_TYPE)
8574 || !validate_arg (src, POINTER_TYPE))
8575 return NULL_TREE;
8577 len = c_strlen (src, 1);
8578 if (!len
8579 || TREE_CODE (len) != INTEGER_CST)
8580 return NULL_TREE;
8582 if (optimize_function_for_size_p (cfun)
8583 /* If length is zero it's small enough. */
8584 && !integer_zerop (len))
8585 return NULL_TREE;
8587 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8588 if (!fn)
8589 return NULL_TREE;
8591 lenp1 = size_binop_loc (loc, PLUS_EXPR,
8592 fold_convert_loc (loc, size_type_node, len),
8593 build_int_cst (size_type_node, 1));
8594 /* We use dest twice in building our expression. Save it from
8595 multiple expansions. */
8596 dest = builtin_save_expr (dest);
8597 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8599 type = TREE_TYPE (TREE_TYPE (fndecl));
8600 dest = fold_build_pointer_plus_loc (loc, dest, len);
8601 dest = fold_convert_loc (loc, type, dest);
8602 dest = omit_one_operand_loc (loc, type, dest, call);
8603 return dest;
8606 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8607 arguments to the call, and TYPE is its return type.
8608 Return NULL_TREE if no simplification can be made. */
8610 static tree
8611 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8613 if (!validate_arg (arg1, POINTER_TYPE)
8614 || !validate_arg (arg2, INTEGER_TYPE)
8615 || !validate_arg (len, INTEGER_TYPE))
8616 return NULL_TREE;
8617 else
8619 const char *p1;
8621 if (TREE_CODE (arg2) != INTEGER_CST
8622 || !tree_fits_uhwi_p (len))
8623 return NULL_TREE;
8625 p1 = c_getstr (arg1);
8626 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8628 char c;
8629 const char *r;
8630 tree tem;
8632 if (target_char_cast (arg2, &c))
8633 return NULL_TREE;
8635 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8637 if (r == NULL)
8638 return build_int_cst (TREE_TYPE (arg1), 0);
8640 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8641 return fold_convert_loc (loc, type, tem);
8643 return NULL_TREE;
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2
   and length LEN.  Return NULL_TREE if no simplification can be
   made.  The folded result is normalized to -1/0/1 rather than the
   library's arbitrary-sign convention.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (tree_fits_uhwi_p (len) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_to_uhwi (len));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      /* Loads are done through a const-qualified unsigned char pointer
	 so the comparison matches memcmp's byte semantics.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  The constant
   result is normalized to -1/0/1.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings constant: evaluate at compile time.  */
  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  The constant
   result is normalized to -1/0/1.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings constant with a constant length: evaluate at
     compile time.  */
  if (tree_fits_uhwi_p (len) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_to_uhwi (len));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8872 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8873 ARG. Return NULL_TREE if no simplification can be made. */
8875 static tree
8876 fold_builtin_signbit (location_t loc, tree arg, tree type)
8878 if (!validate_arg (arg, REAL_TYPE))
8879 return NULL_TREE;
8881 /* If ARG is a compile-time constant, determine the result. */
8882 if (TREE_CODE (arg) == REAL_CST
8883 && !TREE_OVERFLOW (arg))
8885 REAL_VALUE_TYPE c;
8887 c = TREE_REAL_CST (arg);
8888 return (REAL_VALUE_NEGATIVE (c)
8889 ? build_one_cst (type)
8890 : build_zero_cst (type));
8893 /* If ARG is non-negative, the result is always zero. */
8894 if (tree_expr_nonnegative_p (arg))
8895 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8897 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8898 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8899 return fold_convert (type,
8900 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8901 build_real (TREE_TYPE (arg), dconst0)));
8903 return NULL_TREE;
8906 /* Fold function call to builtin copysign, copysignf or copysignl with
8907 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8908 be made. */
8910 static tree
8911 fold_builtin_copysign (location_t loc, tree fndecl,
8912 tree arg1, tree arg2, tree type)
8914 tree tem;
8916 if (!validate_arg (arg1, REAL_TYPE)
8917 || !validate_arg (arg2, REAL_TYPE))
8918 return NULL_TREE;
8920 /* copysign(X,X) is X. */
8921 if (operand_equal_p (arg1, arg2, 0))
8922 return fold_convert_loc (loc, type, arg1);
8924 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8925 if (TREE_CODE (arg1) == REAL_CST
8926 && TREE_CODE (arg2) == REAL_CST
8927 && !TREE_OVERFLOW (arg1)
8928 && !TREE_OVERFLOW (arg2))
8930 REAL_VALUE_TYPE c1, c2;
8932 c1 = TREE_REAL_CST (arg1);
8933 c2 = TREE_REAL_CST (arg2);
8934 /* c1.sign := c2.sign. */
8935 real_copysign (&c1, &c2);
8936 return build_real (type, c1);
8939 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8940 Remember to evaluate Y for side-effects. */
8941 if (tree_expr_nonnegative_p (arg2))
8942 return omit_one_operand_loc (loc, type,
8943 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8944 arg2);
8946 /* Strip sign changing operations for the first argument. */
8947 tem = fold_strip_sign_ops (arg1);
8948 if (tem)
8949 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8951 return NULL_TREE;
8954 /* Fold a call to builtin isascii with argument ARG. */
8956 static tree
8957 fold_builtin_isascii (location_t loc, tree arg)
8959 if (!validate_arg (arg, INTEGER_TYPE))
8960 return NULL_TREE;
8961 else
8963 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8964 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8965 build_int_cst (integer_type_node,
8966 ~ (unsigned HOST_WIDE_INT) 0x7f));
8967 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8968 arg, integer_zero_node);
8972 /* Fold a call to builtin toascii with argument ARG. */
8974 static tree
8975 fold_builtin_toascii (location_t loc, tree arg)
8977 if (!validate_arg (arg, INTEGER_TYPE))
8978 return NULL_TREE;
8980 /* Transform toascii(c) -> (c & 0x7f). */
8981 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8982 build_int_cst (integer_type_node, 0x7f));
8985 /* Fold a call to builtin isdigit with argument ARG. */
8987 static tree
8988 fold_builtin_isdigit (location_t loc, tree arg)
8990 if (!validate_arg (arg, INTEGER_TYPE))
8991 return NULL_TREE;
8992 else
8994 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8995 /* According to the C standard, isdigit is unaffected by locale.
8996 However, it definitely is affected by the target character set. */
8997 unsigned HOST_WIDE_INT target_digit0
8998 = lang_hooks.to_target_charset ('0');
9000 if (target_digit0 == 0)
9001 return NULL_TREE;
9003 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9004 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9005 build_int_cst (unsigned_type_node, target_digit0));
9006 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9007 build_int_cst (unsigned_type_node, 9));
9011 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9013 static tree
9014 fold_builtin_fabs (location_t loc, tree arg, tree type)
9016 if (!validate_arg (arg, REAL_TYPE))
9017 return NULL_TREE;
9019 arg = fold_convert_loc (loc, type, arg);
9020 if (TREE_CODE (arg) == REAL_CST)
9021 return fold_abs_const (arg, type);
9022 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9025 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9027 static tree
9028 fold_builtin_abs (location_t loc, tree arg, tree type)
9030 if (!validate_arg (arg, INTEGER_TYPE))
9031 return NULL_TREE;
9033 arg = fold_convert_loc (loc, type, arg);
9034 if (TREE_CODE (arg) == INTEGER_CST)
9035 return fold_abs_const (arg, type);
9036 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9039 /* Fold a fma operation with arguments ARG[012]. */
9041 tree
9042 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9043 tree type, tree arg0, tree arg1, tree arg2)
9045 if (TREE_CODE (arg0) == REAL_CST
9046 && TREE_CODE (arg1) == REAL_CST
9047 && TREE_CODE (arg2) == REAL_CST)
9048 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9050 return NULL_TREE;
9053 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9055 static tree
9056 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9058 if (validate_arg (arg0, REAL_TYPE)
9059 && validate_arg (arg1, REAL_TYPE)
9060 && validate_arg (arg2, REAL_TYPE))
9062 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9063 if (tem)
9064 return tem;
9066 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9067 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9068 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9070 return NULL_TREE;
/* Fold a call to builtin fmin or fmax; MAX selects fmax.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
9118 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9120 static tree
9121 fold_builtin_carg (location_t loc, tree arg, tree type)
9123 if (validate_arg (arg, COMPLEX_TYPE)
9124 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9126 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9128 if (atan2_fn)
9130 tree new_arg = builtin_save_expr (arg);
9131 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9132 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9133 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9137 return NULL_TREE;
/* Fold a call to builtin logb/ilogb with argument ARG; RETTYPE is the
   call's result type (real for logb, integer for ilogb).  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  A real
	     RETTYPE distinguishes logb from ilogb here.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    {
	      /* For logb(-Inf) we have to return +Inf.  */
	      if (real_isinf (value) && real_isneg (value))
		{
		  REAL_VALUE_TYPE tem;
		  real_inf (&tem);
		  return build_real (rettype, tem);
		}
	      return fold_convert_loc (loc, rettype, arg);
	    }
	  /* Fall through...  (ilogb of Inf/NaN is not folded.)  */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9191 /* Fold a call to builtin significand, if radix == 2. */
9193 static tree
9194 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9196 if (! validate_arg (arg, REAL_TYPE))
9197 return NULL_TREE;
9199 STRIP_NOPS (arg);
9201 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9203 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9205 switch (value->cl)
9207 case rvc_zero:
9208 case rvc_nan:
9209 case rvc_inf:
9210 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9211 return fold_convert_loc (loc, rettype, arg);
9212 case rvc_normal:
9213 /* For normal numbers, proceed iff radix == 2. */
9214 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9216 REAL_VALUE_TYPE result = *value;
9217 /* In GCC, normalized significands are in the range [0.5,
9218 1.0). We want them to be [1.0, 2.0) so set the
9219 exponent to 1. */
9220 SET_REAL_EXP (&result, 1);
9221 return build_real (rettype, result);
9223 break;
9227 return NULL_TREE;
9230 /* Fold a call to builtin frexp, we can assume the base is 2. */
9232 static tree
9233 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9235 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9236 return NULL_TREE;
9238 STRIP_NOPS (arg0);
9240 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9241 return NULL_TREE;
9243 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9245 /* Proceed if a valid pointer type was passed in. */
9246 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9248 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9249 tree frac, exp;
9251 switch (value->cl)
9253 case rvc_zero:
9254 /* For +-0, return (*exp = 0, +-0). */
9255 exp = integer_zero_node;
9256 frac = arg0;
9257 break;
9258 case rvc_nan:
9259 case rvc_inf:
9260 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9261 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9262 case rvc_normal:
9264 /* Since the frexp function always expects base 2, and in
9265 GCC normalized significands are already in the range
9266 [0.5, 1.0), we have exactly what frexp wants. */
9267 REAL_VALUE_TYPE frac_rvt = *value;
9268 SET_REAL_EXP (&frac_rvt, 0);
9269 frac = build_real (rettype, frac_rvt);
9270 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9272 break;
9273 default:
9274 gcc_unreachable ();
9277 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9278 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9279 TREE_SIDE_EFFECTS (arg1) = 1;
9280 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9283 return NULL_TREE;
9286 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9287 then we can assume the base is two. If it's false, then we have to
9288 check the mode of the TYPE parameter in certain cases. */
9290 static tree
9291 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9292 tree type, bool ldexp)
9294 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9296 STRIP_NOPS (arg0);
9297 STRIP_NOPS (arg1);
9299 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9300 if (real_zerop (arg0) || integer_zerop (arg1)
9301 || (TREE_CODE (arg0) == REAL_CST
9302 && !real_isfinite (&TREE_REAL_CST (arg0))))
9303 return omit_one_operand_loc (loc, type, arg0, arg1);
9305 /* If both arguments are constant, then try to evaluate it. */
9306 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9307 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9308 && tree_fits_shwi_p (arg1))
9310 /* Bound the maximum adjustment to twice the range of the
9311 mode's valid exponents. Use abs to ensure the range is
9312 positive as a sanity check. */
9313 const long max_exp_adj = 2 *
9314 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9315 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9317 /* Get the user-requested adjustment. */
9318 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9320 /* The requested adjustment must be inside this range. This
9321 is a preliminary cap to avoid things like overflow, we
9322 may still fail to compute the result for other reasons. */
9323 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9325 REAL_VALUE_TYPE initial_result;
9327 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9329 /* Ensure we didn't overflow. */
9330 if (! real_isinf (&initial_result))
9332 const REAL_VALUE_TYPE trunc_result
9333 = real_value_truncate (TYPE_MODE (type), initial_result);
9335 /* Only proceed if the target mode can hold the
9336 resulting value. */
9337 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9338 return build_real (type, trunc_result);
9344 return NULL_TREE;
9347 /* Fold a call to builtin modf. */
9349 static tree
9350 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9352 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9353 return NULL_TREE;
9355 STRIP_NOPS (arg0);
9357 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9358 return NULL_TREE;
9360 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9362 /* Proceed if a valid pointer type was passed in. */
9363 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9365 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9366 REAL_VALUE_TYPE trunc, frac;
9368 switch (value->cl)
9370 case rvc_nan:
9371 case rvc_zero:
9372 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9373 trunc = frac = *value;
9374 break;
9375 case rvc_inf:
9376 /* For +-Inf, return (*arg1 = arg0, +-0). */
9377 frac = dconst0;
9378 frac.sign = value->sign;
9379 trunc = *value;
9380 break;
9381 case rvc_normal:
9382 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9383 real_trunc (&trunc, VOIDmode, value);
9384 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9385 /* If the original number was negative and already
9386 integral, then the fractional part is -0.0. */
9387 if (value->sign && frac.cl == rvc_zero)
9388 frac.sign = value->sign;
9389 break;
9392 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9393 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9394 build_real (rettype, trunc));
9395 TREE_SIDE_EFFECTS (arg1) = 1;
9396 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9397 build_real (rettype, frac));
9400 return NULL_TREE;
9403 /* Given a location LOC, an interclass builtin function decl FNDECL
9404 and its single argument ARG, return an folded expression computing
9405 the same, or NULL_TREE if we either couldn't or didn't want to fold
9406 (the latter happen if there's an RTL instruction available). */
9408 static tree
9409 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9411 machine_mode mode;
9413 if (!validate_arg (arg, REAL_TYPE))
9414 return NULL_TREE;
9416 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9417 return NULL_TREE;
9419 mode = TYPE_MODE (TREE_TYPE (arg));
9421 /* If there is no optab, try generic code. */
9422 switch (DECL_FUNCTION_CODE (fndecl))
9424 tree result;
9426 CASE_FLT_FN (BUILT_IN_ISINF):
9428 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9429 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9430 tree const type = TREE_TYPE (arg);
9431 REAL_VALUE_TYPE r;
9432 char buf[128];
9434 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9435 real_from_string (&r, buf);
9436 result = build_call_expr (isgr_fn, 2,
9437 fold_build1_loc (loc, ABS_EXPR, type, arg),
9438 build_real (type, r));
9439 return result;
9441 CASE_FLT_FN (BUILT_IN_FINITE):
9442 case BUILT_IN_ISFINITE:
9444 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9445 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9446 tree const type = TREE_TYPE (arg);
9447 REAL_VALUE_TYPE r;
9448 char buf[128];
9450 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9451 real_from_string (&r, buf);
9452 result = build_call_expr (isle_fn, 2,
9453 fold_build1_loc (loc, ABS_EXPR, type, arg),
9454 build_real (type, r));
9455 /*result = fold_build2_loc (loc, UNGT_EXPR,
9456 TREE_TYPE (TREE_TYPE (fndecl)),
9457 fold_build1_loc (loc, ABS_EXPR, type, arg),
9458 build_real (type, r));
9459 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9460 TREE_TYPE (TREE_TYPE (fndecl)),
9461 result);*/
9462 return result;
9464 case BUILT_IN_ISNORMAL:
9466 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9467 islessequal(fabs(x),DBL_MAX). */
9468 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9469 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9470 tree const type = TREE_TYPE (arg);
9471 REAL_VALUE_TYPE rmax, rmin;
9472 char buf[128];
9474 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9475 real_from_string (&rmax, buf);
9476 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9477 real_from_string (&rmin, buf);
9478 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9479 result = build_call_expr (isle_fn, 2, arg,
9480 build_real (type, rmax));
9481 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9482 build_call_expr (isge_fn, 2, arg,
9483 build_real (type, rmin)));
9484 return result;
9486 default:
9487 break;
9490 return NULL_TREE;
9493 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9494 ARG is the argument for the call. */
9496 static tree
9497 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9499 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9500 REAL_VALUE_TYPE r;
9502 if (!validate_arg (arg, REAL_TYPE))
9503 return NULL_TREE;
9505 switch (builtin_index)
9507 case BUILT_IN_ISINF:
9508 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9509 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9511 if (TREE_CODE (arg) == REAL_CST)
9513 r = TREE_REAL_CST (arg);
9514 if (real_isinf (&r))
9515 return real_compare (GT_EXPR, &r, &dconst0)
9516 ? integer_one_node : integer_minus_one_node;
9517 else
9518 return integer_zero_node;
9521 return NULL_TREE;
9523 case BUILT_IN_ISINF_SIGN:
9525 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9526 /* In a boolean context, GCC will fold the inner COND_EXPR to
9527 1. So e.g. "if (isinf_sign(x))" would be folded to just
9528 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9529 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9530 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9531 tree tmp = NULL_TREE;
9533 arg = builtin_save_expr (arg);
9535 if (signbit_fn && isinf_fn)
9537 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9538 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9540 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9541 signbit_call, integer_zero_node);
9542 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9543 isinf_call, integer_zero_node);
9545 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9546 integer_minus_one_node, integer_one_node);
9547 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9548 isinf_call, tmp,
9549 integer_zero_node);
9552 return tmp;
9555 case BUILT_IN_ISFINITE:
9556 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9557 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9558 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9560 if (TREE_CODE (arg) == REAL_CST)
9562 r = TREE_REAL_CST (arg);
9563 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9566 return NULL_TREE;
9568 case BUILT_IN_ISNAN:
9569 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9570 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9572 if (TREE_CODE (arg) == REAL_CST)
9574 r = TREE_REAL_CST (arg);
9575 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9578 arg = builtin_save_expr (arg);
9579 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9581 default:
9582 gcc_unreachable ();
9586 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9587 This builtin will generate code to return the appropriate floating
9588 point classification depending on the value of the floating point
9589 number passed in. The possible return values must be supplied as
9590 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9591 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9592 one floating point argument which is "type generic". */
9594 static tree
9595 fold_builtin_fpclassify (location_t loc, tree exp)
9597 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9598 arg, type, res, tmp;
9599 machine_mode mode;
9600 REAL_VALUE_TYPE r;
9601 char buf[128];
9603 /* Verify the required arguments in the original call. */
9604 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9605 INTEGER_TYPE, INTEGER_TYPE,
9606 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9607 return NULL_TREE;
9609 fp_nan = CALL_EXPR_ARG (exp, 0);
9610 fp_infinite = CALL_EXPR_ARG (exp, 1);
9611 fp_normal = CALL_EXPR_ARG (exp, 2);
9612 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9613 fp_zero = CALL_EXPR_ARG (exp, 4);
9614 arg = CALL_EXPR_ARG (exp, 5);
9615 type = TREE_TYPE (arg);
9616 mode = TYPE_MODE (type);
9617 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9619 /* fpclassify(x) ->
9620 isnan(x) ? FP_NAN :
9621 (fabs(x) == Inf ? FP_INFINITE :
9622 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9623 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9625 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9626 build_real (type, dconst0));
9627 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9628 tmp, fp_zero, fp_subnormal);
9630 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9631 real_from_string (&r, buf);
9632 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9633 arg, build_real (type, r));
9634 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9636 if (HONOR_INFINITIES (mode))
9638 real_inf (&r);
9639 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9640 build_real (type, r));
9641 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9642 fp_infinite, res);
9645 if (HONOR_NANS (mode))
9647 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9648 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9651 return res;
9654 /* Fold a call to an unordered comparison function such as
9655 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9656 being called and ARG0 and ARG1 are the arguments for the call.
9657 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9658 the opposite of the desired result. UNORDERED_CODE is used
9659 for modes that can hold NaNs and ORDERED_CODE is used for
9660 the rest. */
9662 static tree
9663 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9664 enum tree_code unordered_code,
9665 enum tree_code ordered_code)
9667 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9668 enum tree_code code;
9669 tree type0, type1;
9670 enum tree_code code0, code1;
9671 tree cmp_type = NULL_TREE;
9673 type0 = TREE_TYPE (arg0);
9674 type1 = TREE_TYPE (arg1);
9676 code0 = TREE_CODE (type0);
9677 code1 = TREE_CODE (type1);
9679 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9680 /* Choose the wider of two real types. */
9681 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9682 ? type0 : type1;
9683 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9684 cmp_type = type0;
9685 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9686 cmp_type = type1;
9688 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9689 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9691 if (unordered_code == UNORDERED_EXPR)
9693 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9694 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9695 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9698 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9699 : ordered_code;
9700 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9701 fold_build2_loc (loc, code, type, arg0, arg1));
9704 /* Fold a call to built-in function FNDECL with 0 arguments.
9705 IGNORE is true if the result of the function call is ignored. This
9706 function returns NULL_TREE if no simplification was possible. */
9708 static tree
9709 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9711 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9712 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9713 switch (fcode)
9715 CASE_FLT_FN (BUILT_IN_INF):
9716 case BUILT_IN_INFD32:
9717 case BUILT_IN_INFD64:
9718 case BUILT_IN_INFD128:
9719 return fold_builtin_inf (loc, type, true);
9721 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9722 return fold_builtin_inf (loc, type, false);
9724 case BUILT_IN_CLASSIFY_TYPE:
9725 return fold_builtin_classify_type (NULL_TREE);
9727 case BUILT_IN_UNREACHABLE:
9728 if (flag_sanitize & SANITIZE_UNREACHABLE
9729 && (current_function_decl == NULL
9730 || !lookup_attribute ("no_sanitize_undefined",
9731 DECL_ATTRIBUTES (current_function_decl))))
9732 return ubsan_instrument_unreachable (loc);
9733 break;
9735 default:
9736 break;
9738 return NULL_TREE;
9741 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9742 IGNORE is true if the result of the function call is ignored. This
9743 function returns NULL_TREE if no simplification was possible. */
9745 static tree
9746 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9748 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9749 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9750 switch (fcode)
9752 case BUILT_IN_CONSTANT_P:
9754 tree val = fold_builtin_constant_p (arg0);
9756 /* Gimplification will pull the CALL_EXPR for the builtin out of
9757 an if condition. When not optimizing, we'll not CSE it back.
9758 To avoid link error types of regressions, return false now. */
9759 if (!val && !optimize)
9760 val = integer_zero_node;
9762 return val;
9765 case BUILT_IN_CLASSIFY_TYPE:
9766 return fold_builtin_classify_type (arg0);
9768 case BUILT_IN_STRLEN:
9769 return fold_builtin_strlen (loc, type, arg0);
/* Absolute-value family: dispatched to dedicated folders.  */
9771 CASE_FLT_FN (BUILT_IN_FABS):
9772 case BUILT_IN_FABSD32:
9773 case BUILT_IN_FABSD64:
9774 case BUILT_IN_FABSD128:
9775 return fold_builtin_fabs (loc, arg0, type);
9777 case BUILT_IN_ABS:
9778 case BUILT_IN_LABS:
9779 case BUILT_IN_LLABS:
9780 case BUILT_IN_IMAXABS:
9781 return fold_builtin_abs (loc, arg0, type);
/* Complex builtins: each case checks for a COMPLEX_TYPE argument with
   a real component type before folding.  */
9783 CASE_FLT_FN (BUILT_IN_CONJ):
9784 if (validate_arg (arg0, COMPLEX_TYPE)
9785 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9786 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9787 break;
9789 CASE_FLT_FN (BUILT_IN_CREAL):
9790 if (validate_arg (arg0, COMPLEX_TYPE)
9791 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
/* NOTE(review): stray double ';' below is harmless but could be cleaned up.  */
9792 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9793 break;
9795 CASE_FLT_FN (BUILT_IN_CIMAG):
9796 if (validate_arg (arg0, COMPLEX_TYPE)
9797 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9798 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9799 break;
9801 CASE_FLT_FN (BUILT_IN_CCOS):
9802 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9804 CASE_FLT_FN (BUILT_IN_CCOSH):
9805 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9807 CASE_FLT_FN (BUILT_IN_CPROJ):
9808 return fold_builtin_cproj (loc, arg0, type);
/* Complex transcendentals folded via MPC when the argument is constant.  */
9810 CASE_FLT_FN (BUILT_IN_CSIN):
9811 if (validate_arg (arg0, COMPLEX_TYPE)
9812 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9813 return do_mpc_arg1 (arg0, type, mpc_sin);
9814 break;
9816 CASE_FLT_FN (BUILT_IN_CSINH):
9817 if (validate_arg (arg0, COMPLEX_TYPE)
9818 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9819 return do_mpc_arg1 (arg0, type, mpc_sinh);
9820 break;
9822 CASE_FLT_FN (BUILT_IN_CTAN):
9823 if (validate_arg (arg0, COMPLEX_TYPE)
9824 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9825 return do_mpc_arg1 (arg0, type, mpc_tan);
9826 break;
9828 CASE_FLT_FN (BUILT_IN_CTANH):
9829 if (validate_arg (arg0, COMPLEX_TYPE)
9830 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9831 return do_mpc_arg1 (arg0, type, mpc_tanh);
9832 break;
9834 CASE_FLT_FN (BUILT_IN_CLOG):
9835 if (validate_arg (arg0, COMPLEX_TYPE)
9836 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9837 return do_mpc_arg1 (arg0, type, mpc_log);
9838 break;
9840 CASE_FLT_FN (BUILT_IN_CSQRT):
9841 if (validate_arg (arg0, COMPLEX_TYPE)
9842 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9843 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9844 break;
9846 CASE_FLT_FN (BUILT_IN_CASIN):
9847 if (validate_arg (arg0, COMPLEX_TYPE)
9848 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9849 return do_mpc_arg1 (arg0, type, mpc_asin);
9850 break;
9852 CASE_FLT_FN (BUILT_IN_CACOS):
9853 if (validate_arg (arg0, COMPLEX_TYPE)
9854 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9855 return do_mpc_arg1 (arg0, type, mpc_acos);
9856 break;
9858 CASE_FLT_FN (BUILT_IN_CATAN):
9859 if (validate_arg (arg0, COMPLEX_TYPE)
9860 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9861 return do_mpc_arg1 (arg0, type, mpc_atan);
9862 break;
9864 CASE_FLT_FN (BUILT_IN_CASINH):
9865 if (validate_arg (arg0, COMPLEX_TYPE)
9866 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9867 return do_mpc_arg1 (arg0, type, mpc_asinh);
9868 break;
9870 CASE_FLT_FN (BUILT_IN_CACOSH):
9871 if (validate_arg (arg0, COMPLEX_TYPE)
9872 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9873 return do_mpc_arg1 (arg0, type, mpc_acosh);
9874 break;
9876 CASE_FLT_FN (BUILT_IN_CATANH):
9877 if (validate_arg (arg0, COMPLEX_TYPE)
9878 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9879 return do_mpc_arg1 (arg0, type, mpc_atanh);
9880 break;
9882 CASE_FLT_FN (BUILT_IN_CABS):
9883 return fold_builtin_cabs (loc, arg0, type, fndecl);
9885 CASE_FLT_FN (BUILT_IN_CARG):
9886 return fold_builtin_carg (loc, arg0, type);
/* Real-valued math functions: constant arguments are folded via MPFR.
   The extra do_mpfr_arg1 arguments bound the valid input domain
   (lower, upper, inclusive flag).  */
9888 CASE_FLT_FN (BUILT_IN_SQRT):
9889 return fold_builtin_sqrt (loc, arg0, type);
9891 CASE_FLT_FN (BUILT_IN_CBRT):
9892 return fold_builtin_cbrt (loc, arg0, type);
9894 CASE_FLT_FN (BUILT_IN_ASIN):
9895 if (validate_arg (arg0, REAL_TYPE))
9896 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9897 &dconstm1, &dconst1, true);
9898 break;
9900 CASE_FLT_FN (BUILT_IN_ACOS):
9901 if (validate_arg (arg0, REAL_TYPE))
9902 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9903 &dconstm1, &dconst1, true);
9904 break;
9906 CASE_FLT_FN (BUILT_IN_ATAN):
9907 if (validate_arg (arg0, REAL_TYPE))
9908 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9909 break;
9911 CASE_FLT_FN (BUILT_IN_ASINH):
9912 if (validate_arg (arg0, REAL_TYPE))
9913 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9914 break;
9916 CASE_FLT_FN (BUILT_IN_ACOSH):
9917 if (validate_arg (arg0, REAL_TYPE))
9918 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9919 &dconst1, NULL, true);
9920 break;
9922 CASE_FLT_FN (BUILT_IN_ATANH):
9923 if (validate_arg (arg0, REAL_TYPE))
9924 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9925 &dconstm1, &dconst1, false);
9926 break;
9928 CASE_FLT_FN (BUILT_IN_SIN):
9929 if (validate_arg (arg0, REAL_TYPE))
9930 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9931 break;
9933 CASE_FLT_FN (BUILT_IN_COS):
9934 return fold_builtin_cos (loc, arg0, type, fndecl);
9936 CASE_FLT_FN (BUILT_IN_TAN):
9937 return fold_builtin_tan (arg0, type);
9939 CASE_FLT_FN (BUILT_IN_CEXP):
9940 return fold_builtin_cexp (loc, arg0, type);
9942 CASE_FLT_FN (BUILT_IN_CEXPI):
9943 if (validate_arg (arg0, REAL_TYPE))
9944 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9945 break;
9947 CASE_FLT_FN (BUILT_IN_SINH):
9948 if (validate_arg (arg0, REAL_TYPE))
9949 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9950 break;
9952 CASE_FLT_FN (BUILT_IN_COSH):
9953 return fold_builtin_cosh (loc, arg0, type, fndecl);
9955 CASE_FLT_FN (BUILT_IN_TANH):
9956 if (validate_arg (arg0, REAL_TYPE))
9957 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9958 break;
9960 CASE_FLT_FN (BUILT_IN_ERF):
9961 if (validate_arg (arg0, REAL_TYPE))
9962 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9963 break;
9965 CASE_FLT_FN (BUILT_IN_ERFC):
9966 if (validate_arg (arg0, REAL_TYPE))
9967 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9968 break;
9970 CASE_FLT_FN (BUILT_IN_TGAMMA):
9971 if (validate_arg (arg0, REAL_TYPE))
9972 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9973 break;
9975 CASE_FLT_FN (BUILT_IN_EXP):
9976 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9978 CASE_FLT_FN (BUILT_IN_EXP2):
9979 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9981 CASE_FLT_FN (BUILT_IN_EXP10):
9982 CASE_FLT_FN (BUILT_IN_POW10):
9983 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9985 CASE_FLT_FN (BUILT_IN_EXPM1):
9986 if (validate_arg (arg0, REAL_TYPE))
9987 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9988 break;
9990 CASE_FLT_FN (BUILT_IN_LOG):
9991 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9993 CASE_FLT_FN (BUILT_IN_LOG2):
9994 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9996 CASE_FLT_FN (BUILT_IN_LOG10):
9997 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9999 CASE_FLT_FN (BUILT_IN_LOG1P):
10000 if (validate_arg (arg0, REAL_TYPE))
10001 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10002 &dconstm1, NULL, false);
10003 break;
/* Bessel functions, also MPFR-folded with domain bounds.  */
10005 CASE_FLT_FN (BUILT_IN_J0):
10006 if (validate_arg (arg0, REAL_TYPE))
10007 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10008 NULL, NULL, 0);
10009 break;
10011 CASE_FLT_FN (BUILT_IN_J1):
10012 if (validate_arg (arg0, REAL_TYPE))
10013 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10014 NULL, NULL, 0);
10015 break;
10017 CASE_FLT_FN (BUILT_IN_Y0):
10018 if (validate_arg (arg0, REAL_TYPE))
10019 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10020 &dconst0, NULL, false);
10021 break;
10023 CASE_FLT_FN (BUILT_IN_Y1):
10024 if (validate_arg (arg0, REAL_TYPE))
10025 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10026 &dconst0, NULL, false);
10027 break;
10029 CASE_FLT_FN (BUILT_IN_NAN):
10030 case BUILT_IN_NAND32:
10031 case BUILT_IN_NAND64:
10032 case BUILT_IN_NAND128:
10033 return fold_builtin_nan (arg0, type, true);
10035 CASE_FLT_FN (BUILT_IN_NANS):
10036 return fold_builtin_nan (arg0, type, false);
/* Rounding builtins.  */
10038 CASE_FLT_FN (BUILT_IN_FLOOR):
10039 return fold_builtin_floor (loc, fndecl, arg0);
10041 CASE_FLT_FN (BUILT_IN_CEIL):
10042 return fold_builtin_ceil (loc, fndecl, arg0);
10044 CASE_FLT_FN (BUILT_IN_TRUNC):
10045 return fold_builtin_trunc (loc, fndecl, arg0);
10047 CASE_FLT_FN (BUILT_IN_ROUND):
10048 return fold_builtin_round (loc, fndecl, arg0);
10050 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10051 CASE_FLT_FN (BUILT_IN_RINT):
10052 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10054 CASE_FLT_FN (BUILT_IN_ICEIL):
10055 CASE_FLT_FN (BUILT_IN_LCEIL):
10056 CASE_FLT_FN (BUILT_IN_LLCEIL):
10057 CASE_FLT_FN (BUILT_IN_LFLOOR):
10058 CASE_FLT_FN (BUILT_IN_IFLOOR):
10059 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10060 CASE_FLT_FN (BUILT_IN_IROUND):
10061 CASE_FLT_FN (BUILT_IN_LROUND):
10062 CASE_FLT_FN (BUILT_IN_LLROUND):
10063 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10065 CASE_FLT_FN (BUILT_IN_IRINT):
10066 CASE_FLT_FN (BUILT_IN_LRINT):
10067 CASE_FLT_FN (BUILT_IN_LLRINT):
10068 return fold_fixed_mathfn (loc, fndecl, arg0);
/* Bit-manipulation builtins.  */
10070 case BUILT_IN_BSWAP16:
10071 case BUILT_IN_BSWAP32:
10072 case BUILT_IN_BSWAP64:
10073 return fold_builtin_bswap (fndecl, arg0);
10075 CASE_INT_FN (BUILT_IN_FFS):
10076 CASE_INT_FN (BUILT_IN_CLZ):
10077 CASE_INT_FN (BUILT_IN_CTZ):
10078 CASE_INT_FN (BUILT_IN_CLRSB):
10079 CASE_INT_FN (BUILT_IN_POPCOUNT):
10080 CASE_INT_FN (BUILT_IN_PARITY):
10081 return fold_builtin_bitop (fndecl, arg0);
10083 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10084 return fold_builtin_signbit (loc, arg0, type);
10086 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10087 return fold_builtin_significand (loc, arg0, type);
10089 CASE_FLT_FN (BUILT_IN_ILOGB):
10090 CASE_FLT_FN (BUILT_IN_LOGB):
10091 return fold_builtin_logb (loc, arg0, type);
10093 case BUILT_IN_ISASCII:
10094 return fold_builtin_isascii (loc, arg0);
10096 case BUILT_IN_TOASCII:
10097 return fold_builtin_toascii (loc, arg0);
10099 case BUILT_IN_ISDIGIT:
10100 return fold_builtin_isdigit (loc, arg0);
/* Classification builtins: try constant folding first, then fall back
   to the generic interclass expansion.  */
10102 CASE_FLT_FN (BUILT_IN_FINITE):
10103 case BUILT_IN_FINITED32:
10104 case BUILT_IN_FINITED64:
10105 case BUILT_IN_FINITED128:
10106 case BUILT_IN_ISFINITE:
10108 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10109 if (ret)
10110 return ret;
10111 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10114 CASE_FLT_FN (BUILT_IN_ISINF):
10115 case BUILT_IN_ISINFD32:
10116 case BUILT_IN_ISINFD64:
10117 case BUILT_IN_ISINFD128:
10119 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10120 if (ret)
10121 return ret;
10122 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10125 case BUILT_IN_ISNORMAL:
10126 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10128 case BUILT_IN_ISINF_SIGN:
10129 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10131 CASE_FLT_FN (BUILT_IN_ISNAN):
10132 case BUILT_IN_ISNAND32:
10133 case BUILT_IN_ISNAND64:
10134 case BUILT_IN_ISNAND128:
10135 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10137 case BUILT_IN_PRINTF:
10138 case BUILT_IN_PRINTF_UNLOCKED:
10139 case BUILT_IN_VPRINTF:
10140 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10142 case BUILT_IN_FREE:
/* free(NULL) is a no-op; drop the call entirely.  */
10143 if (integer_zerop (arg0))
10144 return build_empty_stmt (loc);
10145 break;
10147 default:
10148 break;
10151 return NULL_TREE;
10155 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10156 IGNORE is true if the result of the function call is ignored. This
10157 function returns NULL_TREE if no simplification was possible. */
10159 static tree
10160 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10162 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10163 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10165 switch (fcode)
10167 CASE_FLT_FN (BUILT_IN_JN):
10168 if (validate_arg (arg0, INTEGER_TYPE)
10169 && validate_arg (arg1, REAL_TYPE))
10170 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10171 break;
10173 CASE_FLT_FN (BUILT_IN_YN):
10174 if (validate_arg (arg0, INTEGER_TYPE)
10175 && validate_arg (arg1, REAL_TYPE))
10176 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10177 &dconst0, false);
10178 break;
10180 CASE_FLT_FN (BUILT_IN_DREM):
10181 CASE_FLT_FN (BUILT_IN_REMAINDER):
10182 if (validate_arg (arg0, REAL_TYPE)
10183 && validate_arg (arg1, REAL_TYPE))
10184 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10185 break;
10187 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10188 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10189 if (validate_arg (arg0, REAL_TYPE)
10190 && validate_arg (arg1, POINTER_TYPE))
10191 return do_mpfr_lgamma_r (arg0, arg1, type);
10192 break;
10194 CASE_FLT_FN (BUILT_IN_ATAN2):
10195 if (validate_arg (arg0, REAL_TYPE)
10196 && validate_arg (arg1, REAL_TYPE))
10197 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10198 break;
10200 CASE_FLT_FN (BUILT_IN_FDIM):
10201 if (validate_arg (arg0, REAL_TYPE)
10202 && validate_arg (arg1, REAL_TYPE))
10203 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10204 break;
10206 CASE_FLT_FN (BUILT_IN_HYPOT):
10207 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10209 CASE_FLT_FN (BUILT_IN_CPOW):
10210 if (validate_arg (arg0, COMPLEX_TYPE)
10211 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10212 && validate_arg (arg1, COMPLEX_TYPE)
10213 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10214 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10215 break;
10217 CASE_FLT_FN (BUILT_IN_LDEXP):
10218 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10219 CASE_FLT_FN (BUILT_IN_SCALBN):
10220 CASE_FLT_FN (BUILT_IN_SCALBLN):
10221 return fold_builtin_load_exponent (loc, arg0, arg1,
10222 type, /*ldexp=*/false);
10224 CASE_FLT_FN (BUILT_IN_FREXP):
10225 return fold_builtin_frexp (loc, arg0, arg1, type);
10227 CASE_FLT_FN (BUILT_IN_MODF):
10228 return fold_builtin_modf (loc, arg0, arg1, type);
10230 case BUILT_IN_STRSTR:
10231 return fold_builtin_strstr (loc, arg0, arg1, type);
10233 case BUILT_IN_STRSPN:
10234 return fold_builtin_strspn (loc, arg0, arg1);
10236 case BUILT_IN_STRCSPN:
10237 return fold_builtin_strcspn (loc, arg0, arg1);
10239 case BUILT_IN_STRCHR:
10240 case BUILT_IN_INDEX:
10241 return fold_builtin_strchr (loc, arg0, arg1, type);
10243 case BUILT_IN_STRRCHR:
10244 case BUILT_IN_RINDEX:
10245 return fold_builtin_strrchr (loc, arg0, arg1, type);
10247 case BUILT_IN_STPCPY:
10248 if (ignore)
10250 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10251 if (!fn)
10252 break;
10254 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10256 else
10257 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10258 break;
10260 case BUILT_IN_STRCMP:
10261 return fold_builtin_strcmp (loc, arg0, arg1);
10263 case BUILT_IN_STRPBRK:
10264 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10266 case BUILT_IN_EXPECT:
10267 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10269 CASE_FLT_FN (BUILT_IN_POW):
10270 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10272 CASE_FLT_FN (BUILT_IN_POWI):
10273 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10275 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10276 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10278 CASE_FLT_FN (BUILT_IN_FMIN):
10279 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10281 CASE_FLT_FN (BUILT_IN_FMAX):
10282 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10284 case BUILT_IN_ISGREATER:
10285 return fold_builtin_unordered_cmp (loc, fndecl,
10286 arg0, arg1, UNLE_EXPR, LE_EXPR);
10287 case BUILT_IN_ISGREATEREQUAL:
10288 return fold_builtin_unordered_cmp (loc, fndecl,
10289 arg0, arg1, UNLT_EXPR, LT_EXPR);
10290 case BUILT_IN_ISLESS:
10291 return fold_builtin_unordered_cmp (loc, fndecl,
10292 arg0, arg1, UNGE_EXPR, GE_EXPR);
10293 case BUILT_IN_ISLESSEQUAL:
10294 return fold_builtin_unordered_cmp (loc, fndecl,
10295 arg0, arg1, UNGT_EXPR, GT_EXPR);
10296 case BUILT_IN_ISLESSGREATER:
10297 return fold_builtin_unordered_cmp (loc, fndecl,
10298 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10299 case BUILT_IN_ISUNORDERED:
10300 return fold_builtin_unordered_cmp (loc, fndecl,
10301 arg0, arg1, UNORDERED_EXPR,
10302 NOP_EXPR);
10304 /* We do the folding for va_start in the expander. */
10305 case BUILT_IN_VA_START:
10306 break;
10308 case BUILT_IN_OBJECT_SIZE:
10309 return fold_builtin_object_size (arg0, arg1);
10311 case BUILT_IN_PRINTF:
10312 case BUILT_IN_PRINTF_UNLOCKED:
10313 case BUILT_IN_VPRINTF:
10314 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10316 case BUILT_IN_PRINTF_CHK:
10317 case BUILT_IN_VPRINTF_CHK:
10318 if (!validate_arg (arg0, INTEGER_TYPE)
10319 || TREE_SIDE_EFFECTS (arg0))
10320 return NULL_TREE;
10321 else
10322 return fold_builtin_printf (loc, fndecl,
10323 arg1, NULL_TREE, ignore, fcode);
10324 break;
10326 case BUILT_IN_FPRINTF:
10327 case BUILT_IN_FPRINTF_UNLOCKED:
10328 case BUILT_IN_VFPRINTF:
10329 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10330 ignore, fcode);
10332 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10333 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10335 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10336 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10338 default:
10339 break;
10341 return NULL_TREE;
10344 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10345 and ARG2. IGNORE is true if the result of the function call is ignored.
10346 This function returns NULL_TREE if no simplification was possible. */
10348 static tree
10349 fold_builtin_3 (location_t loc, tree fndecl,
10350 tree arg0, tree arg1, tree arg2, bool ignore)
10352 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10353 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10354 switch (fcode)
10357 CASE_FLT_FN (BUILT_IN_SINCOS):
10358 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10360 CASE_FLT_FN (BUILT_IN_FMA):
10361 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10362 break;
10364 CASE_FLT_FN (BUILT_IN_REMQUO):
10365 if (validate_arg (arg0, REAL_TYPE)
10366 && validate_arg (arg1, REAL_TYPE)
10367 && validate_arg (arg2, POINTER_TYPE))
10368 return do_mpfr_remquo (arg0, arg1, arg2);
10369 break;
10371 case BUILT_IN_STRNCAT:
10372 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10374 case BUILT_IN_STRNCMP:
10375 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10377 case BUILT_IN_MEMCHR:
10378 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10380 case BUILT_IN_BCMP:
10381 case BUILT_IN_MEMCMP:
10382 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10384 case BUILT_IN_PRINTF_CHK:
10385 case BUILT_IN_VPRINTF_CHK:
10386 if (!validate_arg (arg0, INTEGER_TYPE)
10387 || TREE_SIDE_EFFECTS (arg0))
10388 return NULL_TREE;
10389 else
10390 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10391 break;
10393 case BUILT_IN_FPRINTF:
10394 case BUILT_IN_FPRINTF_UNLOCKED:
10395 case BUILT_IN_VFPRINTF:
10396 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10397 ignore, fcode);
10399 case BUILT_IN_FPRINTF_CHK:
10400 case BUILT_IN_VFPRINTF_CHK:
10401 if (!validate_arg (arg1, INTEGER_TYPE)
10402 || TREE_SIDE_EFFECTS (arg1))
10403 return NULL_TREE;
10404 else
10405 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10406 ignore, fcode);
10408 case BUILT_IN_EXPECT:
10409 return fold_builtin_expect (loc, arg0, arg1, arg2);
10411 default:
10412 break;
10414 return NULL_TREE;
10417 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10418 ARG2, and ARG3. IGNORE is true if the result of the function call is
10419 ignored. This function returns NULL_TREE if no simplification was
10420 possible. */
10422 static tree
10423 fold_builtin_4 (location_t loc, tree fndecl,
10424 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10426 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10428 switch (fcode)
10430 case BUILT_IN_FPRINTF_CHK:
10431 case BUILT_IN_VFPRINTF_CHK:
10432 if (!validate_arg (arg1, INTEGER_TYPE)
10433 || TREE_SIDE_EFFECTS (arg1))
10434 return NULL_TREE;
10435 else
10436 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10437 ignore, fcode);
10438 break;
10440 default:
10441 break;
10443 return NULL_TREE;
10446 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10447 arguments, where NARGS <= 4. IGNORE is true if the result of the
10448 function call is ignored. This function returns NULL_TREE if no
10449 simplification was possible. Note that this only folds builtins with
10450 fixed argument patterns. Foldings that do varargs-to-varargs
10451 transformations, or that match calls with more than 4 arguments,
10452 need to be handled with fold_builtin_varargs instead. */
10454 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10456 tree
10457 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10459 tree ret = NULL_TREE;
10461 switch (nargs)
10463 case 0:
10464 ret = fold_builtin_0 (loc, fndecl, ignore);
10465 break;
10466 case 1:
10467 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10468 break;
10469 case 2:
10470 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10471 break;
10472 case 3:
10473 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10474 break;
10475 case 4:
10476 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10477 ignore);
10478 break;
10479 default:
10480 break;
10482 if (ret)
10484 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10485 SET_EXPR_LOCATION (ret, loc);
10486 TREE_NO_WARNING (ret) = 1;
10487 return ret;
10489 return NULL_TREE;
10492 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10493 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10494 of arguments in ARGS to be omitted. OLDNARGS is the number of
10495 elements in ARGS. */
10497 static tree
10498 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10499 int skip, tree fndecl, int n, va_list newargs)
10501 int nargs = oldnargs - skip + n;
10502 tree *buffer;
10504 if (n > 0)
10506 int i, j;
10508 buffer = XALLOCAVEC (tree, nargs);
10509 for (i = 0; i < n; i++)
10510 buffer[i] = va_arg (newargs, tree);
10511 for (j = skip; j < oldnargs; j++, i++)
10512 buffer[i] = args[j];
10514 else
10515 buffer = args + skip;
10517 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10520 /* Return true if FNDECL shouldn't be folded right now.
10521 If a built-in function has an inline attribute always_inline
10522 wrapper, defer folding it after always_inline functions have
10523 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10524 might not be performed. */
10526 bool
10527 avoid_folding_inline_builtin (tree fndecl)
10529 return (DECL_DECLARED_INLINE_P (fndecl)
10530 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10531 && cfun
10532 && !cfun->always_inline_functions_inlined
10533 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10536 /* A wrapper function for builtin folding that prevents warnings for
10537 "statement without effect" and the like, caused by removing the
10538 call node earlier than the warning is generated. */
10540 tree
10541 fold_call_expr (location_t loc, tree exp, bool ignore)
10543 tree ret = NULL_TREE;
10544 tree fndecl = get_callee_fndecl (exp);
10545 if (fndecl
10546 && TREE_CODE (fndecl) == FUNCTION_DECL
10547 && DECL_BUILT_IN (fndecl)
10548 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10549 yet. Defer folding until we see all the arguments
10550 (after inlining). */
10551 && !CALL_EXPR_VA_ARG_PACK (exp))
10553 int nargs = call_expr_nargs (exp);
10555 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10556 instead last argument is __builtin_va_arg_pack (). Defer folding
10557 even in that case, until arguments are finalized. */
10558 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10560 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10561 if (fndecl2
10562 && TREE_CODE (fndecl2) == FUNCTION_DECL
10563 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10564 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10565 return NULL_TREE;
10568 if (avoid_folding_inline_builtin (fndecl))
10569 return NULL_TREE;
10571 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10572 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10573 CALL_EXPR_ARGP (exp), ignore);
10574 else
10576 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10578 tree *args = CALL_EXPR_ARGP (exp);
10579 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10581 if (!ret)
10582 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10583 if (ret)
10584 return ret;
10587 return NULL_TREE;
10590 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10591 N arguments are passed in the array ARGARRAY. */
10593 tree
10594 fold_builtin_call_array (location_t loc, tree type,
10595 tree fn,
10596 int n,
10597 tree *argarray)
10599 tree ret = NULL_TREE;
10600 tree exp;
10602 if (TREE_CODE (fn) == ADDR_EXPR)
10604 tree fndecl = TREE_OPERAND (fn, 0);
10605 if (TREE_CODE (fndecl) == FUNCTION_DECL
10606 && DECL_BUILT_IN (fndecl))
10608 /* If last argument is __builtin_va_arg_pack (), arguments to this
10609 function are not finalized yet. Defer folding until they are. */
10610 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10612 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10613 if (fndecl2
10614 && TREE_CODE (fndecl2) == FUNCTION_DECL
10615 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10616 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10617 return build_call_array_loc (loc, type, fn, n, argarray);
10619 if (avoid_folding_inline_builtin (fndecl))
10620 return build_call_array_loc (loc, type, fn, n, argarray);
10621 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10623 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10624 if (ret)
10625 return ret;
10627 return build_call_array_loc (loc, type, fn, n, argarray);
10629 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10631 /* First try the transformations that don't require consing up
10632 an exp. */
10633 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10634 if (ret)
10635 return ret;
10638 /* If we got this far, we need to build an exp. */
10639 exp = build_call_array_loc (loc, type, fn, n, argarray);
10640 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10641 return ret ? ret : exp;
10645 return build_call_array_loc (loc, type, fn, n, argarray);
10648 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10649 along with N new arguments specified as the "..." parameters. SKIP
10650 is the number of arguments in EXP to be omitted. This function is used
10651 to do varargs-to-varargs transformations. */
10653 static tree
10654 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10656 va_list ap;
10657 tree t;
10659 va_start (ap, n);
10660 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10661 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10662 va_end (ap);
10664 return t;
10667 /* Validate a single argument ARG against a tree code CODE representing
10668 a type. */
10670 static bool
10671 validate_arg (const_tree arg, enum tree_code code)
10673 if (!arg)
10674 return false;
10675 else if (code == POINTER_TYPE)
10676 return POINTER_TYPE_P (TREE_TYPE (arg));
10677 else if (code == INTEGER_TYPE)
10678 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10679 return code == TREE_CODE (TREE_TYPE (arg));
10682 /* This function validates the types of a function call argument list
10683 against a specified list of tree_codes. If the last specifier is a 0,
10684 that represents an ellipses, otherwise the last specifier must be a
10685 VOID_TYPE.
10687 This is the GIMPLE version of validate_arglist. Eventually we want to
10688 completely convert builtins.c to work from GIMPLEs and the tree based
10689 validate_arglist will then be removed. */
10691 bool
10692 validate_gimple_arglist (const_gimple call, ...)
10694 enum tree_code code;
10695 bool res = 0;
10696 va_list ap;
10697 const_tree arg;
10698 size_t i;
10700 va_start (ap, call);
10701 i = 0;
10705 code = (enum tree_code) va_arg (ap, int);
10706 switch (code)
10708 case 0:
10709 /* This signifies an ellipses, any further arguments are all ok. */
10710 res = true;
10711 goto end;
10712 case VOID_TYPE:
10713 /* This signifies an endlink, if no arguments remain, return
10714 true, otherwise return false. */
10715 res = (i == gimple_call_num_args (call));
10716 goto end;
10717 default:
10718 /* If no parameters remain or the parameter's code does not
10719 match the specified code, return false. Otherwise continue
10720 checking any remaining arguments. */
10721 arg = gimple_call_arg (call, i++);
10722 if (!validate_arg (arg, code))
10723 goto end;
10724 break;
10727 while (1);
10729 /* We need gotos here since we can only have one VA_CLOSE in a
10730 function. */
10731 end: ;
10732 va_end (ap);
10734 return res;
10737 /* Default target-specific builtin expander that does nothing. */
10740 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10741 rtx target ATTRIBUTE_UNUSED,
10742 rtx subtarget ATTRIBUTE_UNUSED,
10743 machine_mode mode ATTRIBUTE_UNUSED,
10744 int ignore ATTRIBUTE_UNUSED)
10746 return NULL_RTX;
10749 /* Returns true is EXP represents data that would potentially reside
10750 in a readonly section. */
10752 bool
10753 readonly_data_expr (tree exp)
10755 STRIP_NOPS (exp);
10757 if (TREE_CODE (exp) != ADDR_EXPR)
10758 return false;
10760 exp = get_base_address (TREE_OPERAND (exp, 0));
10761 if (!exp)
10762 return false;
10764 /* Make sure we call decl_readonly_section only for trees it
10765 can handle (since it returns true for everything it doesn't
10766 understand). */
10767 if (TREE_CODE (exp) == STRING_CST
10768 || TREE_CODE (exp) == CONSTRUCTOR
10769 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10770 return decl_readonly_section (exp, 0);
10771 else
10772 return false;
10775 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10776 to the call, and TYPE is its return type.
10778 Return NULL_TREE if no simplification was possible, otherwise return the
10779 simplified form of the call as a tree.
10781 The simplified form may be a constant or other expression which
10782 computes the same value, but in a more efficient manner (including
10783 calls to other builtin functions).
10785 The call may contain arguments which need to be evaluated, but
10786 which are not useful to determine the result of the call. In
10787 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10788 COMPOUND_EXPR will be an argument which must be evaluated.
10789 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10790 COMPOUND_EXPR in the chain will contain the tree for the simplified
10791 form of the builtin function call. */
10793 static tree
10794 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10796 if (!validate_arg (s1, POINTER_TYPE)
10797 || !validate_arg (s2, POINTER_TYPE))
10798 return NULL_TREE;
10799 else
10801 tree fn;
10802 const char *p1, *p2;
10804 p2 = c_getstr (s2);
10805 if (p2 == NULL)
10806 return NULL_TREE;
10808 p1 = c_getstr (s1);
10809 if (p1 != NULL)
10811 const char *r = strstr (p1, p2);
10812 tree tem;
10814 if (r == NULL)
10815 return build_int_cst (TREE_TYPE (s1), 0);
10817 /* Return an offset into the constant string argument. */
10818 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10819 return fold_convert_loc (loc, type, tem);
10822 /* The argument is const char *, and the result is char *, so we need
10823 a type conversion here to avoid a warning. */
10824 if (p2[0] == '\0')
10825 return fold_convert_loc (loc, type, s1);
10827 if (p2[1] != '\0')
10828 return NULL_TREE;
10830 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10831 if (!fn)
10832 return NULL_TREE;
10834 /* New argument list transforming strstr(s1, s2) to
10835 strchr(s1, s2[0]). */
10836 return build_call_expr_loc (loc, fn, 2, s1,
10837 build_int_cst (integer_type_node, p2[0]));
10841 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10842 the call, and TYPE is its return type.
10844 Return NULL_TREE if no simplification was possible, otherwise return the
10845 simplified form of the call as a tree.
10847 The simplified form may be a constant or other expression which
10848 computes the same value, but in a more efficient manner (including
10849 calls to other builtin functions).
10851 The call may contain arguments which need to be evaluated, but
10852 which are not useful to determine the result of the call. In
10853 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10854 COMPOUND_EXPR will be an argument which must be evaluated.
10855 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10856 COMPOUND_EXPR in the chain will contain the tree for the simplified
10857 form of the builtin function call. */
10859 static tree
10860 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10862 if (!validate_arg (s1, POINTER_TYPE)
10863 || !validate_arg (s2, INTEGER_TYPE))
10864 return NULL_TREE;
10865 else
10867 const char *p1;
10869 if (TREE_CODE (s2) != INTEGER_CST)
10870 return NULL_TREE;
10872 p1 = c_getstr (s1);
10873 if (p1 != NULL)
10875 char c;
10876 const char *r;
10877 tree tem;
10879 if (target_char_cast (s2, &c))
10880 return NULL_TREE;
10882 r = strchr (p1, c);
10884 if (r == NULL)
10885 return build_int_cst (TREE_TYPE (s1), 0);
10887 /* Return an offset into the constant string argument. */
10888 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10889 return fold_convert_loc (loc, type, tem);
10891 return NULL_TREE;
10895 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10896 the call, and TYPE is its return type.
10898 Return NULL_TREE if no simplification was possible, otherwise return the
10899 simplified form of the call as a tree.
10901 The simplified form may be a constant or other expression which
10902 computes the same value, but in a more efficient manner (including
10903 calls to other builtin functions).
10905 The call may contain arguments which need to be evaluated, but
10906 which are not useful to determine the result of the call. In
10907 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10908 COMPOUND_EXPR will be an argument which must be evaluated.
10909 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10910 COMPOUND_EXPR in the chain will contain the tree for the simplified
10911 form of the builtin function call. */
10913 static tree
10914 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10916 if (!validate_arg (s1, POINTER_TYPE)
10917 || !validate_arg (s2, INTEGER_TYPE))
10918 return NULL_TREE;
10919 else
10921 tree fn;
10922 const char *p1;
10924 if (TREE_CODE (s2) != INTEGER_CST)
10925 return NULL_TREE;
10927 p1 = c_getstr (s1);
10928 if (p1 != NULL)
10930 char c;
10931 const char *r;
10932 tree tem;
10934 if (target_char_cast (s2, &c))
10935 return NULL_TREE;
10937 r = strrchr (p1, c);
10939 if (r == NULL)
10940 return build_int_cst (TREE_TYPE (s1), 0);
10942 /* Return an offset into the constant string argument. */
10943 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10944 return fold_convert_loc (loc, type, tem);
10947 if (! integer_zerop (s2))
10948 return NULL_TREE;
10950 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10951 if (!fn)
10952 return NULL_TREE;
10954 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10955 return build_call_expr_loc (loc, fn, 2, s1, s2);
10959 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10960 to the call, and TYPE is its return type.
10962 Return NULL_TREE if no simplification was possible, otherwise return the
10963 simplified form of the call as a tree.
10965 The simplified form may be a constant or other expression which
10966 computes the same value, but in a more efficient manner (including
10967 calls to other builtin functions).
10969 The call may contain arguments which need to be evaluated, but
10970 which are not useful to determine the result of the call. In
10971 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10972 COMPOUND_EXPR will be an argument which must be evaluated.
10973 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10974 COMPOUND_EXPR in the chain will contain the tree for the simplified
10975 form of the builtin function call. */
10977 static tree
10978 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10980 if (!validate_arg (s1, POINTER_TYPE)
10981 || !validate_arg (s2, POINTER_TYPE))
10982 return NULL_TREE;
10983 else
10985 tree fn;
10986 const char *p1, *p2;
10988 p2 = c_getstr (s2);
10989 if (p2 == NULL)
10990 return NULL_TREE;
10992 p1 = c_getstr (s1);
10993 if (p1 != NULL)
10995 const char *r = strpbrk (p1, p2);
10996 tree tem;
10998 if (r == NULL)
10999 return build_int_cst (TREE_TYPE (s1), 0);
11001 /* Return an offset into the constant string argument. */
11002 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11003 return fold_convert_loc (loc, type, tem);
11006 if (p2[0] == '\0')
11007 /* strpbrk(x, "") == NULL.
11008 Evaluate and ignore s1 in case it had side-effects. */
11009 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11011 if (p2[1] != '\0')
11012 return NULL_TREE; /* Really call strpbrk. */
11014 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11015 if (!fn)
11016 return NULL_TREE;
11018 /* New argument list transforming strpbrk(s1, s2) to
11019 strchr(s1, s2[0]). */
11020 return build_call_expr_loc (loc, fn, 2, s1,
11021 build_int_cst (integer_type_node, p2[0]));
11025 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11026 arguments to the call.
11028 Return NULL_TREE if no simplification was possible, otherwise return the
11029 simplified form of the call as a tree.
11031 The simplified form may be a constant or other expression which
11032 computes the same value, but in a more efficient manner (including
11033 calls to other builtin functions).
11035 The call may contain arguments which need to be evaluated, but
11036 which are not useful to determine the result of the call. In
11037 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11038 COMPOUND_EXPR will be an argument which must be evaluated.
11039 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11040 COMPOUND_EXPR in the chain will contain the tree for the simplified
11041 form of the builtin function call. */
11043 static tree
11044 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11046 if (!validate_arg (dst, POINTER_TYPE)
11047 || !validate_arg (src, POINTER_TYPE)
11048 || !validate_arg (len, INTEGER_TYPE))
11049 return NULL_TREE;
11050 else
11052 const char *p = c_getstr (src);
11054 /* If the requested length is zero, or the src parameter string
11055 length is zero, return the dst parameter. */
11056 if (integer_zerop (len) || (p && *p == '\0'))
11057 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11059 /* If the requested len is greater than or equal to the string
11060 length, call strcat. */
11061 if (TREE_CODE (len) == INTEGER_CST && p
11062 && compare_tree_int (len, strlen (p)) >= 0)
11064 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11066 /* If the replacement _DECL isn't initialized, don't do the
11067 transformation. */
11068 if (!fn)
11069 return NULL_TREE;
11071 return build_call_expr_loc (loc, fn, 2, dst, src);
11073 return NULL_TREE;
11077 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11078 to the call.
11080 Return NULL_TREE if no simplification was possible, otherwise return the
11081 simplified form of the call as a tree.
11083 The simplified form may be a constant or other expression which
11084 computes the same value, but in a more efficient manner (including
11085 calls to other builtin functions).
11087 The call may contain arguments which need to be evaluated, but
11088 which are not useful to determine the result of the call. In
11089 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11090 COMPOUND_EXPR will be an argument which must be evaluated.
11091 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11092 COMPOUND_EXPR in the chain will contain the tree for the simplified
11093 form of the builtin function call. */
11095 static tree
11096 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11098 if (!validate_arg (s1, POINTER_TYPE)
11099 || !validate_arg (s2, POINTER_TYPE))
11100 return NULL_TREE;
11101 else
11103 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11105 /* If both arguments are constants, evaluate at compile-time. */
11106 if (p1 && p2)
11108 const size_t r = strspn (p1, p2);
11109 return build_int_cst (size_type_node, r);
11112 /* If either argument is "", return NULL_TREE. */
11113 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11114 /* Evaluate and ignore both arguments in case either one has
11115 side-effects. */
11116 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11117 s1, s2);
11118 return NULL_TREE;
11122 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11123 to the call.
11125 Return NULL_TREE if no simplification was possible, otherwise return the
11126 simplified form of the call as a tree.
11128 The simplified form may be a constant or other expression which
11129 computes the same value, but in a more efficient manner (including
11130 calls to other builtin functions).
11132 The call may contain arguments which need to be evaluated, but
11133 which are not useful to determine the result of the call. In
11134 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11135 COMPOUND_EXPR will be an argument which must be evaluated.
11136 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11137 COMPOUND_EXPR in the chain will contain the tree for the simplified
11138 form of the builtin function call. */
11140 static tree
11141 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11143 if (!validate_arg (s1, POINTER_TYPE)
11144 || !validate_arg (s2, POINTER_TYPE))
11145 return NULL_TREE;
11146 else
11148 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11150 /* If both arguments are constants, evaluate at compile-time. */
11151 if (p1 && p2)
11153 const size_t r = strcspn (p1, p2);
11154 return build_int_cst (size_type_node, r);
11157 /* If the first argument is "", return NULL_TREE. */
11158 if (p1 && *p1 == '\0')
11160 /* Evaluate and ignore argument s2 in case it has
11161 side-effects. */
11162 return omit_one_operand_loc (loc, size_type_node,
11163 size_zero_node, s2);
11166 /* If the second argument is "", return __builtin_strlen(s1). */
11167 if (p2 && *p2 == '\0')
11169 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11171 /* If the replacement _DECL isn't initialized, don't do the
11172 transformation. */
11173 if (!fn)
11174 return NULL_TREE;
11176 return build_call_expr_loc (loc, fn, 1, s1);
11178 return NULL_TREE;
11182 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11183 produced. False otherwise. This is done so that we don't output the error
11184 or warning twice or three times. */
11186 bool
11187 fold_builtin_next_arg (tree exp, bool va_start_p)
11189 tree fntype = TREE_TYPE (current_function_decl);
11190 int nargs = call_expr_nargs (exp);
11191 tree arg;
11192 /* There is good chance the current input_location points inside the
11193 definition of the va_start macro (perhaps on the token for
11194 builtin) in a system header, so warnings will not be emitted.
11195 Use the location in real source code. */
11196 source_location current_location =
11197 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11198 NULL);
11200 if (!stdarg_p (fntype))
11202 error ("%<va_start%> used in function with fixed args");
11203 return true;
11206 if (va_start_p)
11208 if (va_start_p && (nargs != 2))
11210 error ("wrong number of arguments to function %<va_start%>");
11211 return true;
11213 arg = CALL_EXPR_ARG (exp, 1);
11215 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11216 when we checked the arguments and if needed issued a warning. */
11217 else
11219 if (nargs == 0)
11221 /* Evidently an out of date version of <stdarg.h>; can't validate
11222 va_start's second argument, but can still work as intended. */
11223 warning_at (current_location,
11224 OPT_Wvarargs,
11225 "%<__builtin_next_arg%> called without an argument");
11226 return true;
11228 else if (nargs > 1)
11230 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11231 return true;
11233 arg = CALL_EXPR_ARG (exp, 0);
11236 if (TREE_CODE (arg) == SSA_NAME)
11237 arg = SSA_NAME_VAR (arg);
11239 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11240 or __builtin_next_arg (0) the first time we see it, after checking
11241 the arguments and if needed issuing a warning. */
11242 if (!integer_zerop (arg))
11244 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11246 /* Strip off all nops for the sake of the comparison. This
11247 is not quite the same as STRIP_NOPS. It does more.
11248 We must also strip off INDIRECT_EXPR for C++ reference
11249 parameters. */
11250 while (CONVERT_EXPR_P (arg)
11251 || TREE_CODE (arg) == INDIRECT_REF)
11252 arg = TREE_OPERAND (arg, 0);
11253 if (arg != last_parm)
11255 /* FIXME: Sometimes with the tree optimizers we can get the
11256 not the last argument even though the user used the last
11257 argument. We just warn and set the arg to be the last
11258 argument so that we will get wrong-code because of
11259 it. */
11260 warning_at (current_location,
11261 OPT_Wvarargs,
11262 "second parameter of %<va_start%> not last named argument");
11265 /* Undefined by C99 7.15.1.4p4 (va_start):
11266 "If the parameter parmN is declared with the register storage
11267 class, with a function or array type, or with a type that is
11268 not compatible with the type that results after application of
11269 the default argument promotions, the behavior is undefined."
11271 else if (DECL_REGISTER (arg))
11273 warning_at (current_location,
11274 OPT_Wvarargs,
11275 "undefined behaviour when second parameter of "
11276 "%<va_start%> is declared with %<register%> storage");
11279 /* We want to verify the second parameter just once before the tree
11280 optimizers are run and then avoid keeping it in the tree,
11281 as otherwise we could warn even for correct code like:
11282 void foo (int i, ...)
11283 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11284 if (va_start_p)
11285 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11286 else
11287 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11289 return false;
11293 /* Expand a call EXP to __builtin_object_size. */
11295 static rtx
11296 expand_builtin_object_size (tree exp)
11298 tree ost;
11299 int object_size_type;
11300 tree fndecl = get_callee_fndecl (exp);
11302 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11304 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11305 exp, fndecl);
11306 expand_builtin_trap ();
11307 return const0_rtx;
11310 ost = CALL_EXPR_ARG (exp, 1);
11311 STRIP_NOPS (ost);
11313 if (TREE_CODE (ost) != INTEGER_CST
11314 || tree_int_cst_sgn (ost) < 0
11315 || compare_tree_int (ost, 3) > 0)
11317 error ("%Klast argument of %D is not integer constant between 0 and 3",
11318 exp, fndecl);
11319 expand_builtin_trap ();
11320 return const0_rtx;
11323 object_size_type = tree_to_shwi (ost);
11325 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11328 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11329 FCODE is the BUILT_IN_* to use.
11330 Return NULL_RTX if we failed; the caller should emit a normal call,
11331 otherwise try to get the result in TARGET, if convenient (and in
11332 mode MODE if that's convenient). */
11334 static rtx
11335 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11336 enum built_in_function fcode)
11338 tree dest, src, len, size;
11340 if (!validate_arglist (exp,
11341 POINTER_TYPE,
11342 fcode == BUILT_IN_MEMSET_CHK
11343 ? INTEGER_TYPE : POINTER_TYPE,
11344 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11345 return NULL_RTX;
11347 dest = CALL_EXPR_ARG (exp, 0);
11348 src = CALL_EXPR_ARG (exp, 1);
11349 len = CALL_EXPR_ARG (exp, 2);
11350 size = CALL_EXPR_ARG (exp, 3);
11352 if (! tree_fits_uhwi_p (size))
11353 return NULL_RTX;
11355 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11357 tree fn;
11359 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11361 warning_at (tree_nonartificial_location (exp),
11362 0, "%Kcall to %D will always overflow destination buffer",
11363 exp, get_callee_fndecl (exp));
11364 return NULL_RTX;
11367 fn = NULL_TREE;
11368 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11369 mem{cpy,pcpy,move,set} is available. */
11370 switch (fcode)
11372 case BUILT_IN_MEMCPY_CHK:
11373 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11374 break;
11375 case BUILT_IN_MEMPCPY_CHK:
11376 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11377 break;
11378 case BUILT_IN_MEMMOVE_CHK:
11379 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11380 break;
11381 case BUILT_IN_MEMSET_CHK:
11382 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11383 break;
11384 default:
11385 break;
11388 if (! fn)
11389 return NULL_RTX;
11391 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11392 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11393 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11394 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11396 else if (fcode == BUILT_IN_MEMSET_CHK)
11397 return NULL_RTX;
11398 else
11400 unsigned int dest_align = get_pointer_alignment (dest);
11402 /* If DEST is not a pointer type, call the normal function. */
11403 if (dest_align == 0)
11404 return NULL_RTX;
11406 /* If SRC and DEST are the same (and not volatile), do nothing. */
11407 if (operand_equal_p (src, dest, 0))
11409 tree expr;
11411 if (fcode != BUILT_IN_MEMPCPY_CHK)
11413 /* Evaluate and ignore LEN in case it has side-effects. */
11414 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11415 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11418 expr = fold_build_pointer_plus (dest, len);
11419 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11422 /* __memmove_chk special case. */
11423 if (fcode == BUILT_IN_MEMMOVE_CHK)
11425 unsigned int src_align = get_pointer_alignment (src);
11427 if (src_align == 0)
11428 return NULL_RTX;
11430 /* If src is categorized for a readonly section we can use
11431 normal __memcpy_chk. */
11432 if (readonly_data_expr (src))
11434 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11435 if (!fn)
11436 return NULL_RTX;
11437 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11438 dest, src, len, size);
11439 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11440 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11441 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11444 return NULL_RTX;
11448 /* Emit warning if a buffer overflow is detected at compile time. */
11450 static void
11451 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11453 int is_strlen = 0;
11454 tree len, size;
11455 location_t loc = tree_nonartificial_location (exp);
11457 switch (fcode)
11459 case BUILT_IN_STRCPY_CHK:
11460 case BUILT_IN_STPCPY_CHK:
11461 /* For __strcat_chk the warning will be emitted only if overflowing
11462 by at least strlen (dest) + 1 bytes. */
11463 case BUILT_IN_STRCAT_CHK:
11464 len = CALL_EXPR_ARG (exp, 1);
11465 size = CALL_EXPR_ARG (exp, 2);
11466 is_strlen = 1;
11467 break;
11468 case BUILT_IN_STRNCAT_CHK:
11469 case BUILT_IN_STRNCPY_CHK:
11470 case BUILT_IN_STPNCPY_CHK:
11471 len = CALL_EXPR_ARG (exp, 2);
11472 size = CALL_EXPR_ARG (exp, 3);
11473 break;
11474 case BUILT_IN_SNPRINTF_CHK:
11475 case BUILT_IN_VSNPRINTF_CHK:
11476 len = CALL_EXPR_ARG (exp, 1);
11477 size = CALL_EXPR_ARG (exp, 3);
11478 break;
11479 default:
11480 gcc_unreachable ();
11483 if (!len || !size)
11484 return;
11486 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11487 return;
11489 if (is_strlen)
11491 len = c_strlen (len, 1);
11492 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11493 return;
11495 else if (fcode == BUILT_IN_STRNCAT_CHK)
11497 tree src = CALL_EXPR_ARG (exp, 1);
11498 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11499 return;
11500 src = c_strlen (src, 1);
11501 if (! src || ! tree_fits_uhwi_p (src))
11503 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11504 exp, get_callee_fndecl (exp));
11505 return;
11507 else if (tree_int_cst_lt (src, size))
11508 return;
11510 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11511 return;
11513 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11514 exp, get_callee_fndecl (exp));
11517 /* Emit warning if a buffer overflow is detected at compile time
11518 in __sprintf_chk/__vsprintf_chk calls. */
11520 static void
11521 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11523 tree size, len, fmt;
11524 const char *fmt_str;
11525 int nargs = call_expr_nargs (exp);
11527 /* Verify the required arguments in the original call. */
11529 if (nargs < 4)
11530 return;
11531 size = CALL_EXPR_ARG (exp, 2);
11532 fmt = CALL_EXPR_ARG (exp, 3);
11534 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11535 return;
11537 /* Check whether the format is a literal string constant. */
11538 fmt_str = c_getstr (fmt);
11539 if (fmt_str == NULL)
11540 return;
11542 if (!init_target_chars ())
11543 return;
11545 /* If the format doesn't contain % args or %%, we know its size. */
11546 if (strchr (fmt_str, target_percent) == 0)
11547 len = build_int_cstu (size_type_node, strlen (fmt_str));
11548 /* If the format is "%s" and first ... argument is a string literal,
11549 we know it too. */
11550 else if (fcode == BUILT_IN_SPRINTF_CHK
11551 && strcmp (fmt_str, target_percent_s) == 0)
11553 tree arg;
11555 if (nargs < 5)
11556 return;
11557 arg = CALL_EXPR_ARG (exp, 4);
11558 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11559 return;
11561 len = c_strlen (arg, 1);
11562 if (!len || ! tree_fits_uhwi_p (len))
11563 return;
11565 else
11566 return;
11568 if (! tree_int_cst_lt (len, size))
11569 warning_at (tree_nonartificial_location (exp),
11570 0, "%Kcall to %D will always overflow destination buffer",
11571 exp, get_callee_fndecl (exp));
11574 /* Emit warning if a free is called with address of a variable. */
11576 static void
11577 maybe_emit_free_warning (tree exp)
11579 tree arg = CALL_EXPR_ARG (exp, 0);
11581 STRIP_NOPS (arg);
11582 if (TREE_CODE (arg) != ADDR_EXPR)
11583 return;
11585 arg = get_base_address (TREE_OPERAND (arg, 0));
11586 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11587 return;
11589 if (SSA_VAR_P (arg))
11590 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11591 "%Kattempt to free a non-heap object %qD", exp, arg);
11592 else
11593 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11594 "%Kattempt to free a non-heap object", exp);
11597 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11598 if possible. */
11600 static tree
11601 fold_builtin_object_size (tree ptr, tree ost)
11603 unsigned HOST_WIDE_INT bytes;
11604 int object_size_type;
11606 if (!validate_arg (ptr, POINTER_TYPE)
11607 || !validate_arg (ost, INTEGER_TYPE))
11608 return NULL_TREE;
11610 STRIP_NOPS (ost);
11612 if (TREE_CODE (ost) != INTEGER_CST
11613 || tree_int_cst_sgn (ost) < 0
11614 || compare_tree_int (ost, 3) > 0)
11615 return NULL_TREE;
11617 object_size_type = tree_to_shwi (ost);
11619 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11620 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11621 and (size_t) 0 for types 2 and 3. */
11622 if (TREE_SIDE_EFFECTS (ptr))
11623 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11625 if (TREE_CODE (ptr) == ADDR_EXPR)
11627 bytes = compute_builtin_object_size (ptr, object_size_type);
11628 if (wi::fits_to_tree_p (bytes, size_type_node))
11629 return build_int_cstu (size_type_node, bytes);
11631 else if (TREE_CODE (ptr) == SSA_NAME)
11633 /* If object size is not known yet, delay folding until
11634 later. Maybe subsequent passes will help determining
11635 it. */
11636 bytes = compute_builtin_object_size (ptr, object_size_type);
11637 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11638 && wi::fits_to_tree_p (bytes, size_type_node))
11639 return build_int_cstu (size_type_node, bytes);
11642 return NULL_TREE;
11645 /* Builtins with folding operations that operate on "..." arguments
11646 need special handling; we need to store the arguments in a convenient
11647 data structure before attempting any folding. Fortunately there are
11648 only a few builtins that fall into this category. FNDECL is the
11649 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11650 result of the function call is ignored. */
11652 static tree
11653 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11654 bool ignore ATTRIBUTE_UNUSED)
11656 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11657 tree ret = NULL_TREE;
11659 switch (fcode)
11661 case BUILT_IN_FPCLASSIFY:
11662 ret = fold_builtin_fpclassify (loc, exp);
11663 break;
11665 default:
11666 break;
11668 if (ret)
11670 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11671 SET_EXPR_LOCATION (ret, loc);
11672 TREE_NO_WARNING (ret) = 1;
11673 return ret;
11675 return NULL_TREE;
11678 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
11679 FMT and ARG are the arguments to the call; we don't fold cases with
11680 more than 2 arguments, and ARG may be null if this is a 1-argument case.
11682 Return NULL_TREE if no simplification was possible, otherwise return the
11683 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11684 code of the function to be simplified. */
11686 static tree
11687 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
11688 tree arg, bool ignore,
11689 enum built_in_function fcode)
11691 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
11692 const char *fmt_str = NULL;
11694 /* If the return value is used, don't do the transformation. */
11695 if (! ignore)
11696 return NULL_TREE;
11698 /* Verify the required arguments in the original call. */
11699 if (!validate_arg (fmt, POINTER_TYPE))
11700 return NULL_TREE;
11702 /* Check whether the format is a literal string constant. */
11703 fmt_str = c_getstr (fmt);
11704 if (fmt_str == NULL)
11705 return NULL_TREE;
11707 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
11709 /* If we're using an unlocked function, assume the other
11710 unlocked functions exist explicitly. */
11711 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
11712 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
11714 else
11716 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
11717 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
11720 if (!init_target_chars ())
11721 return NULL_TREE;
11723 if (strcmp (fmt_str, target_percent_s) == 0
11724 || strchr (fmt_str, target_percent) == NULL)
11726 const char *str;
11728 if (strcmp (fmt_str, target_percent_s) == 0)
11730 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11731 return NULL_TREE;
11733 if (!arg || !validate_arg (arg, POINTER_TYPE))
11734 return NULL_TREE;
11736 str = c_getstr (arg);
11737 if (str == NULL)
11738 return NULL_TREE;
11740 else
11742 /* The format specifier doesn't contain any '%' characters. */
11743 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
11744 && arg)
11745 return NULL_TREE;
11746 str = fmt_str;
11749 /* If the string was "", printf does nothing. */
11750 if (str[0] == '\0')
11751 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11753 /* If the string has length of 1, call putchar. */
11754 if (str[1] == '\0')
11756 /* Given printf("c"), (where c is any one character,)
11757 convert "c"[0] to an int and pass that to the replacement
11758 function. */
11759 newarg = build_int_cst (integer_type_node, str[0]);
11760 if (fn_putchar)
11761 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
11763 else
11765 /* If the string was "string\n", call puts("string"). */
11766 size_t len = strlen (str);
11767 if ((unsigned char)str[len - 1] == target_newline
11768 && (size_t) (int) len == len
11769 && (int) len > 0)
11771 char *newstr;
11772 tree offset_node, string_cst;
11774 /* Create a NUL-terminated string that's one char shorter
11775 than the original, stripping off the trailing '\n'. */
11776 newarg = build_string_literal (len, str);
11777 string_cst = string_constant (newarg, &offset_node);
11778 gcc_checking_assert (string_cst
11779 && (TREE_STRING_LENGTH (string_cst)
11780 == (int) len)
11781 && integer_zerop (offset_node)
11782 && (unsigned char)
11783 TREE_STRING_POINTER (string_cst)[len - 1]
11784 == target_newline);
11785 /* build_string_literal creates a new STRING_CST,
11786 modify it in place to avoid double copying. */
11787 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
11788 newstr[len - 1] = '\0';
11789 if (fn_puts)
11790 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
11792 else
11793 /* We'd like to arrange to call fputs(string,stdout) here,
11794 but we need stdout and don't have a way to get it yet. */
11795 return NULL_TREE;
11799 /* The other optimizations can be done only on the non-va_list variants. */
11800 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11801 return NULL_TREE;
11803 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
11804 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
11806 if (!arg || !validate_arg (arg, POINTER_TYPE))
11807 return NULL_TREE;
11808 if (fn_puts)
11809 call = build_call_expr_loc (loc, fn_puts, 1, arg);
11812 /* If the format specifier was "%c", call __builtin_putchar(arg). */
11813 else if (strcmp (fmt_str, target_percent_c) == 0)
11815 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11816 return NULL_TREE;
11817 if (fn_putchar)
11818 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
11821 if (!call)
11822 return NULL_TREE;
11824 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
11827 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
11828 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
11829 more than 3 arguments, and ARG may be null in the 2-argument case.
11831 Return NULL_TREE if no simplification was possible, otherwise return the
11832 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11833 code of the function to be simplified. */
11835 static tree
11836 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
11837 tree fmt, tree arg, bool ignore,
11838 enum built_in_function fcode)
11840 tree fn_fputc, fn_fputs, call = NULL_TREE;
11841 const char *fmt_str = NULL;
11843 /* If the return value is used, don't do the transformation. */
11844 if (! ignore)
11845 return NULL_TREE;
11847 /* Verify the required arguments in the original call. */
11848 if (!validate_arg (fp, POINTER_TYPE))
11849 return NULL_TREE;
11850 if (!validate_arg (fmt, POINTER_TYPE))
11851 return NULL_TREE;
11853 /* Check whether the format is a literal string constant. */
11854 fmt_str = c_getstr (fmt);
11855 if (fmt_str == NULL)
11856 return NULL_TREE;
11858 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
11860 /* If we're using an unlocked function, assume the other
11861 unlocked functions exist explicitly. */
11862 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
11863 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
11865 else
11867 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
11868 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
11871 if (!init_target_chars ())
11872 return NULL_TREE;
11874 /* If the format doesn't contain % args or %%, use strcpy. */
11875 if (strchr (fmt_str, target_percent) == NULL)
11877 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
11878 && arg)
11879 return NULL_TREE;
11881 /* If the format specifier was "", fprintf does nothing. */
11882 if (fmt_str[0] == '\0')
11884 /* If FP has side-effects, just wait until gimplification is
11885 done. */
11886 if (TREE_SIDE_EFFECTS (fp))
11887 return NULL_TREE;
11889 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11892 /* When "string" doesn't contain %, replace all cases of
11893 fprintf (fp, string) with fputs (string, fp). The fputs
11894 builtin will take care of special cases like length == 1. */
11895 if (fn_fputs)
11896 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
11899 /* The other optimizations can be done only on the non-va_list variants. */
11900 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
11901 return NULL_TREE;
11903 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
11904 else if (strcmp (fmt_str, target_percent_s) == 0)
11906 if (!arg || !validate_arg (arg, POINTER_TYPE))
11907 return NULL_TREE;
11908 if (fn_fputs)
11909 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
11912 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
11913 else if (strcmp (fmt_str, target_percent_c) == 0)
11915 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11916 return NULL_TREE;
11917 if (fn_fputc)
11918 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
11921 if (!call)
11922 return NULL_TREE;
11923 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
11926 /* Initialize format string characters in the target charset. */
11928 bool
11929 init_target_chars (void)
11931 static bool init;
11932 if (!init)
11934 target_newline = lang_hooks.to_target_charset ('\n');
11935 target_percent = lang_hooks.to_target_charset ('%');
11936 target_c = lang_hooks.to_target_charset ('c');
11937 target_s = lang_hooks.to_target_charset ('s');
11938 if (target_newline == 0 || target_percent == 0 || target_c == 0
11939 || target_s == 0)
11940 return false;
11942 target_percent_c[0] = target_percent;
11943 target_percent_c[1] = target_c;
11944 target_percent_c[2] = '\0';
11946 target_percent_s[0] = target_percent;
11947 target_percent_s[1] = target_s;
11948 target_percent_s[2] = '\0';
11950 target_percent_s_newline[0] = target_percent;
11951 target_percent_s_newline[1] = target_s;
11952 target_percent_s_newline[2] = target_newline;
11953 target_percent_s_newline[3] = '\0';
11955 init = true;
11957 return true;
11960 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11961 and no overflow/underflow occurred. INEXACT is true if M was not
11962 exactly calculated. TYPE is the tree type for the result. This
11963 function assumes that you cleared the MPFR flags and then
11964 calculated M to see if anything subsequently set a flag prior to
11965 entering this function. Return NULL_TREE if any checks fail. */
11967 static tree
11968 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11970 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11971 overflow/underflow occurred. If -frounding-math, proceed iff the
11972 result of calling FUNC was exact. */
11973 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11974 && (!flag_rounding_math || !inexact))
11976 REAL_VALUE_TYPE rr;
11978 real_from_mpfr (&rr, m, type, GMP_RNDN);
11979 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11980 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11981 but the mpft_t is not, then we underflowed in the
11982 conversion. */
11983 if (real_isfinite (&rr)
11984 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11986 REAL_VALUE_TYPE rmode;
11988 real_convert (&rmode, TYPE_MODE (type), &rr);
11989 /* Proceed iff the specified mode can hold the value. */
11990 if (real_identical (&rmode, &rr))
11991 return build_real (type, rmode);
11994 return NULL_TREE;
11997 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11998 number and no overflow/underflow occurred. INEXACT is true if M
11999 was not exactly calculated. TYPE is the tree type for the result.
12000 This function assumes that you cleared the MPFR flags and then
12001 calculated M to see if anything subsequently set a flag prior to
12002 entering this function. Return NULL_TREE if any checks fail, if
12003 FORCE_CONVERT is true, then bypass the checks. */
12005 static tree
12006 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12008 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12009 overflow/underflow occurred. If -frounding-math, proceed iff the
12010 result of calling FUNC was exact. */
12011 if (force_convert
12012 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12013 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12014 && (!flag_rounding_math || !inexact)))
12016 REAL_VALUE_TYPE re, im;
12018 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12019 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12020 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12021 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12022 but the mpft_t is not, then we underflowed in the
12023 conversion. */
12024 if (force_convert
12025 || (real_isfinite (&re) && real_isfinite (&im)
12026 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12027 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12029 REAL_VALUE_TYPE re_mode, im_mode;
12031 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12032 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12033 /* Proceed iff the specified mode can hold the value. */
12034 if (force_convert
12035 || (real_identical (&re_mode, &re)
12036 && real_identical (&im_mode, &im)))
12037 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12038 build_real (TREE_TYPE (type), im_mode));
12041 return NULL_TREE;
12044 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12045 FUNC on it and return the resulting value as a tree with type TYPE.
12046 If MIN and/or MAX are not NULL, then the supplied ARG must be
12047 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12048 acceptable values, otherwise they are not. The mpfr precision is
12049 set to the precision of TYPE. We assume that function FUNC returns
12050 zero if the result could be calculated exactly within the requested
12051 precision. */
12053 static tree
12054 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12055 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12056 bool inclusive)
12058 tree result = NULL_TREE;
12060 STRIP_NOPS (arg);
12062 /* To proceed, MPFR must exactly represent the target floating point
12063 format, which only happens when the target base equals two. */
12064 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12065 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12067 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12069 if (real_isfinite (ra)
12070 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12071 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12073 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12074 const int prec = fmt->p;
12075 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12076 int inexact;
12077 mpfr_t m;
12079 mpfr_init2 (m, prec);
12080 mpfr_from_real (m, ra, GMP_RNDN);
12081 mpfr_clear_flags ();
12082 inexact = func (m, m, rnd);
12083 result = do_mpfr_ckconv (m, type, inexact);
12084 mpfr_clear (m);
12088 return result;
12091 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12092 FUNC on it and return the resulting value as a tree with type TYPE.
12093 The mpfr precision is set to the precision of TYPE. We assume that
12094 function FUNC returns zero if the result could be calculated
12095 exactly within the requested precision. */
12097 static tree
12098 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12099 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12101 tree result = NULL_TREE;
12103 STRIP_NOPS (arg1);
12104 STRIP_NOPS (arg2);
12106 /* To proceed, MPFR must exactly represent the target floating point
12107 format, which only happens when the target base equals two. */
12108 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12109 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12110 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12112 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12113 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12115 if (real_isfinite (ra1) && real_isfinite (ra2))
12117 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12118 const int prec = fmt->p;
12119 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12120 int inexact;
12121 mpfr_t m1, m2;
12123 mpfr_inits2 (prec, m1, m2, NULL);
12124 mpfr_from_real (m1, ra1, GMP_RNDN);
12125 mpfr_from_real (m2, ra2, GMP_RNDN);
12126 mpfr_clear_flags ();
12127 inexact = func (m1, m1, m2, rnd);
12128 result = do_mpfr_ckconv (m1, type, inexact);
12129 mpfr_clears (m1, m2, NULL);
12133 return result;
12136 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12137 FUNC on it and return the resulting value as a tree with type TYPE.
12138 The mpfr precision is set to the precision of TYPE. We assume that
12139 function FUNC returns zero if the result could be calculated
12140 exactly within the requested precision. */
12142 static tree
12143 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12144 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12146 tree result = NULL_TREE;
12148 STRIP_NOPS (arg1);
12149 STRIP_NOPS (arg2);
12150 STRIP_NOPS (arg3);
12152 /* To proceed, MPFR must exactly represent the target floating point
12153 format, which only happens when the target base equals two. */
12154 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12155 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12156 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12157 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12159 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12160 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12161 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12163 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12165 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12166 const int prec = fmt->p;
12167 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12168 int inexact;
12169 mpfr_t m1, m2, m3;
12171 mpfr_inits2 (prec, m1, m2, m3, NULL);
12172 mpfr_from_real (m1, ra1, GMP_RNDN);
12173 mpfr_from_real (m2, ra2, GMP_RNDN);
12174 mpfr_from_real (m3, ra3, GMP_RNDN);
12175 mpfr_clear_flags ();
12176 inexact = func (m1, m1, m2, m3, rnd);
12177 result = do_mpfr_ckconv (m1, type, inexact);
12178 mpfr_clears (m1, m2, m3, NULL);
12182 return result;
12185 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12186 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12187 If ARG_SINP and ARG_COSP are NULL then the result is returned
12188 as a complex value.
12189 The type is taken from the type of ARG and is used for setting the
12190 precision of the calculation and results. */
12192 static tree
12193 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12195 tree const type = TREE_TYPE (arg);
12196 tree result = NULL_TREE;
12198 STRIP_NOPS (arg);
12200 /* To proceed, MPFR must exactly represent the target floating point
12201 format, which only happens when the target base equals two. */
12202 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12203 && TREE_CODE (arg) == REAL_CST
12204 && !TREE_OVERFLOW (arg))
12206 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12208 if (real_isfinite (ra))
12210 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12211 const int prec = fmt->p;
12212 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12213 tree result_s, result_c;
12214 int inexact;
12215 mpfr_t m, ms, mc;
12217 mpfr_inits2 (prec, m, ms, mc, NULL);
12218 mpfr_from_real (m, ra, GMP_RNDN);
12219 mpfr_clear_flags ();
12220 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12221 result_s = do_mpfr_ckconv (ms, type, inexact);
12222 result_c = do_mpfr_ckconv (mc, type, inexact);
12223 mpfr_clears (m, ms, mc, NULL);
12224 if (result_s && result_c)
12226 /* If we are to return in a complex value do so. */
12227 if (!arg_sinp && !arg_cosp)
12228 return build_complex (build_complex_type (type),
12229 result_c, result_s);
12231 /* Dereference the sin/cos pointer arguments. */
12232 arg_sinp = build_fold_indirect_ref (arg_sinp);
12233 arg_cosp = build_fold_indirect_ref (arg_cosp);
12234 /* Proceed if valid pointer type were passed in. */
12235 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12236 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12238 /* Set the values. */
12239 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12240 result_s);
12241 TREE_SIDE_EFFECTS (result_s) = 1;
12242 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12243 result_c);
12244 TREE_SIDE_EFFECTS (result_c) = 1;
12245 /* Combine the assignments into a compound expr. */
12246 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12247 result_s, result_c));
12252 return result;
12255 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12256 two-argument mpfr order N Bessel function FUNC on them and return
12257 the resulting value as a tree with type TYPE. The mpfr precision
12258 is set to the precision of TYPE. We assume that function FUNC
12259 returns zero if the result could be calculated exactly within the
12260 requested precision. */
12261 static tree
12262 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12263 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12264 const REAL_VALUE_TYPE *min, bool inclusive)
12266 tree result = NULL_TREE;
12268 STRIP_NOPS (arg1);
12269 STRIP_NOPS (arg2);
12271 /* To proceed, MPFR must exactly represent the target floating point
12272 format, which only happens when the target base equals two. */
12273 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12274 && tree_fits_shwi_p (arg1)
12275 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12277 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12278 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12280 if (n == (long)n
12281 && real_isfinite (ra)
12282 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12284 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12285 const int prec = fmt->p;
12286 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12287 int inexact;
12288 mpfr_t m;
12290 mpfr_init2 (m, prec);
12291 mpfr_from_real (m, ra, GMP_RNDN);
12292 mpfr_clear_flags ();
12293 inexact = func (m, n, m, rnd);
12294 result = do_mpfr_ckconv (m, type, inexact);
12295 mpfr_clear (m);
12299 return result;
12302 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12303 the pointer *(ARG_QUO) and return the result. The type is taken
12304 from the type of ARG0 and is used for setting the precision of the
12305 calculation and results. */
12307 static tree
12308 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12310 tree const type = TREE_TYPE (arg0);
12311 tree result = NULL_TREE;
12313 STRIP_NOPS (arg0);
12314 STRIP_NOPS (arg1);
12316 /* To proceed, MPFR must exactly represent the target floating point
12317 format, which only happens when the target base equals two. */
12318 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12319 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12320 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12322 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12323 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12325 if (real_isfinite (ra0) && real_isfinite (ra1))
12327 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12328 const int prec = fmt->p;
12329 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12330 tree result_rem;
12331 long integer_quo;
12332 mpfr_t m0, m1;
12334 mpfr_inits2 (prec, m0, m1, NULL);
12335 mpfr_from_real (m0, ra0, GMP_RNDN);
12336 mpfr_from_real (m1, ra1, GMP_RNDN);
12337 mpfr_clear_flags ();
12338 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12339 /* Remquo is independent of the rounding mode, so pass
12340 inexact=0 to do_mpfr_ckconv(). */
12341 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12342 mpfr_clears (m0, m1, NULL);
12343 if (result_rem)
12345 /* MPFR calculates quo in the host's long so it may
12346 return more bits in quo than the target int can hold
12347 if sizeof(host long) > sizeof(target int). This can
12348 happen even for native compilers in LP64 mode. In
12349 these cases, modulo the quo value with the largest
12350 number that the target int can hold while leaving one
12351 bit for the sign. */
12352 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12353 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12355 /* Dereference the quo pointer argument. */
12356 arg_quo = build_fold_indirect_ref (arg_quo);
12357 /* Proceed iff a valid pointer type was passed in. */
12358 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12360 /* Set the value. */
12361 tree result_quo
12362 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12363 build_int_cst (TREE_TYPE (arg_quo),
12364 integer_quo));
12365 TREE_SIDE_EFFECTS (result_quo) = 1;
12366 /* Combine the quo assignment with the rem. */
12367 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12368 result_quo, result_rem));
12373 return result;
12376 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12377 resulting value as a tree with type TYPE. The mpfr precision is
12378 set to the precision of TYPE. We assume that this mpfr function
12379 returns zero if the result could be calculated exactly within the
12380 requested precision. In addition, the integer pointer represented
12381 by ARG_SG will be dereferenced and set to the appropriate signgam
12382 (-1,1) value. */
12384 static tree
12385 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12387 tree result = NULL_TREE;
12389 STRIP_NOPS (arg);
12391 /* To proceed, MPFR must exactly represent the target floating point
12392 format, which only happens when the target base equals two. Also
12393 verify ARG is a constant and that ARG_SG is an int pointer. */
12394 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12395 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12396 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12397 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12399 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12401 /* In addition to NaN and Inf, the argument cannot be zero or a
12402 negative integer. */
12403 if (real_isfinite (ra)
12404 && ra->cl != rvc_zero
12405 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12407 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12408 const int prec = fmt->p;
12409 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12410 int inexact, sg;
12411 mpfr_t m;
12412 tree result_lg;
12414 mpfr_init2 (m, prec);
12415 mpfr_from_real (m, ra, GMP_RNDN);
12416 mpfr_clear_flags ();
12417 inexact = mpfr_lgamma (m, &sg, m, rnd);
12418 result_lg = do_mpfr_ckconv (m, type, inexact);
12419 mpfr_clear (m);
12420 if (result_lg)
12422 tree result_sg;
12424 /* Dereference the arg_sg pointer argument. */
12425 arg_sg = build_fold_indirect_ref (arg_sg);
12426 /* Assign the signgam value into *arg_sg. */
12427 result_sg = fold_build2 (MODIFY_EXPR,
12428 TREE_TYPE (arg_sg), arg_sg,
12429 build_int_cst (TREE_TYPE (arg_sg), sg));
12430 TREE_SIDE_EFFECTS (result_sg) = 1;
12431 /* Combine the signgam assignment with the lgamma result. */
12432 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12433 result_sg, result_lg));
12438 return result;
12441 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12442 function FUNC on it and return the resulting value as a tree with
12443 type TYPE. The mpfr precision is set to the precision of TYPE. We
12444 assume that function FUNC returns zero if the result could be
12445 calculated exactly within the requested precision. */
12447 static tree
12448 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12450 tree result = NULL_TREE;
12452 STRIP_NOPS (arg);
12454 /* To proceed, MPFR must exactly represent the target floating point
12455 format, which only happens when the target base equals two. */
12456 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12457 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12458 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12460 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12461 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12463 if (real_isfinite (re) && real_isfinite (im))
12465 const struct real_format *const fmt =
12466 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12467 const int prec = fmt->p;
12468 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12469 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12470 int inexact;
12471 mpc_t m;
12473 mpc_init2 (m, prec);
12474 mpfr_from_real (mpc_realref (m), re, rnd);
12475 mpfr_from_real (mpc_imagref (m), im, rnd);
12476 mpfr_clear_flags ();
12477 inexact = func (m, m, crnd);
12478 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
12479 mpc_clear (m);
12483 return result;
12486 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
12487 mpc function FUNC on it and return the resulting value as a tree
12488 with type TYPE. The mpfr precision is set to the precision of
12489 TYPE. We assume that function FUNC returns zero if the result
12490 could be calculated exactly within the requested precision. If
12491 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12492 in the arguments and/or results. */
12494 tree
12495 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12496 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12498 tree result = NULL_TREE;
12500 STRIP_NOPS (arg0);
12501 STRIP_NOPS (arg1);
12503 /* To proceed, MPFR must exactly represent the target floating point
12504 format, which only happens when the target base equals two. */
12505 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12506 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12507 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12508 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12509 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12511 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12512 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12513 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12514 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12516 if (do_nonfinite
12517 || (real_isfinite (re0) && real_isfinite (im0)
12518 && real_isfinite (re1) && real_isfinite (im1)))
12520 const struct real_format *const fmt =
12521 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12522 const int prec = fmt->p;
12523 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12524 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12525 int inexact;
12526 mpc_t m0, m1;
12528 mpc_init2 (m0, prec);
12529 mpc_init2 (m1, prec);
12530 mpfr_from_real (mpc_realref (m0), re0, rnd);
12531 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12532 mpfr_from_real (mpc_realref (m1), re1, rnd);
12533 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12534 mpfr_clear_flags ();
12535 inexact = func (m0, m0, m1, crnd);
12536 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12537 mpc_clear (m0);
12538 mpc_clear (m1);
12542 return result;
12545 /* A wrapper function for builtin folding that prevents warnings for
12546 "statement without effect" and the like, caused by removing the
12547 call node earlier than the warning is generated. */
12549 tree
12550 fold_call_stmt (gimple stmt, bool ignore)
12552 tree ret = NULL_TREE;
12553 tree fndecl = gimple_call_fndecl (stmt);
12554 location_t loc = gimple_location (stmt);
12555 if (fndecl
12556 && TREE_CODE (fndecl) == FUNCTION_DECL
12557 && DECL_BUILT_IN (fndecl)
12558 && !gimple_call_va_arg_pack_p (stmt))
12560 int nargs = gimple_call_num_args (stmt);
12561 tree *args = (nargs > 0
12562 ? gimple_call_arg_ptr (stmt, 0)
12563 : &error_mark_node);
12565 if (avoid_folding_inline_builtin (fndecl))
12566 return NULL_TREE;
12567 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12569 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12571 else
12573 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
12574 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
12575 if (ret)
12577 /* Propagate location information from original call to
12578 expansion of builtin. Otherwise things like
12579 maybe_emit_chk_warning, that operate on the expansion
12580 of a builtin, will use the wrong location information. */
12581 if (gimple_has_location (stmt))
12583 tree realret = ret;
12584 if (TREE_CODE (ret) == NOP_EXPR)
12585 realret = TREE_OPERAND (ret, 0);
12586 if (CAN_HAVE_LOCATION_P (realret)
12587 && !EXPR_HAS_LOCATION (realret))
12588 SET_EXPR_LOCATION (realret, loc);
12589 return realret;
12591 return ret;
12595 return NULL_TREE;
12598 /* Look up the function in builtin_decl that corresponds to DECL
12599 and set ASMSPEC as its user assembler name. DECL must be a
12600 function decl that declares a builtin. */
12602 void
12603 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12605 tree builtin;
12606 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12607 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12608 && asmspec != 0);
12610 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12611 set_user_assembler_name (builtin, asmspec);
12612 switch (DECL_FUNCTION_CODE (decl))
12614 case BUILT_IN_MEMCPY:
12615 init_block_move_fn (asmspec);
12616 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12617 break;
12618 case BUILT_IN_MEMSET:
12619 init_block_clear_fn (asmspec);
12620 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12621 break;
12622 case BUILT_IN_MEMMOVE:
12623 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12624 break;
12625 case BUILT_IN_MEMCMP:
12626 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12627 break;
12628 case BUILT_IN_ABORT:
12629 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12630 break;
12631 case BUILT_IN_FFS:
12632 if (INT_TYPE_SIZE < BITS_PER_WORD)
12634 set_user_assembler_libfunc ("ffs", asmspec);
12635 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12636 MODE_INT, 0), "ffs");
12638 break;
12639 default:
12640 break;
12644 /* Return true if DECL is a builtin that expands to a constant or similarly
12645 simple code. */
12646 bool
12647 is_simple_builtin (tree decl)
12649 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12650 switch (DECL_FUNCTION_CODE (decl))
12652 /* Builtins that expand to constants. */
12653 case BUILT_IN_CONSTANT_P:
12654 case BUILT_IN_EXPECT:
12655 case BUILT_IN_OBJECT_SIZE:
12656 case BUILT_IN_UNREACHABLE:
12657 /* Simple register moves or loads from stack. */
12658 case BUILT_IN_ASSUME_ALIGNED:
12659 case BUILT_IN_RETURN_ADDRESS:
12660 case BUILT_IN_EXTRACT_RETURN_ADDR:
12661 case BUILT_IN_FROB_RETURN_ADDR:
12662 case BUILT_IN_RETURN:
12663 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12664 case BUILT_IN_FRAME_ADDRESS:
12665 case BUILT_IN_VA_END:
12666 case BUILT_IN_STACK_SAVE:
12667 case BUILT_IN_STACK_RESTORE:
12668 /* Exception state returns or moves registers around. */
12669 case BUILT_IN_EH_FILTER:
12670 case BUILT_IN_EH_POINTER:
12671 case BUILT_IN_EH_COPY_VALUES:
12672 return true;
12674 default:
12675 return false;
12678 return false;
12681 /* Return true if DECL is a builtin that is not expensive, i.e., they are
12682 most probably expanded inline into reasonably simple code. This is a
12683 superset of is_simple_builtin. */
12684 bool
12685 is_inexpensive_builtin (tree decl)
12687 if (!decl)
12688 return false;
12689 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12690 return true;
12691 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12692 switch (DECL_FUNCTION_CODE (decl))
12694 case BUILT_IN_ABS:
12695 case BUILT_IN_ALLOCA:
12696 case BUILT_IN_ALLOCA_WITH_ALIGN:
12697 case BUILT_IN_BSWAP16:
12698 case BUILT_IN_BSWAP32:
12699 case BUILT_IN_BSWAP64:
12700 case BUILT_IN_CLZ:
12701 case BUILT_IN_CLZIMAX:
12702 case BUILT_IN_CLZL:
12703 case BUILT_IN_CLZLL:
12704 case BUILT_IN_CTZ:
12705 case BUILT_IN_CTZIMAX:
12706 case BUILT_IN_CTZL:
12707 case BUILT_IN_CTZLL:
12708 case BUILT_IN_FFS:
12709 case BUILT_IN_FFSIMAX:
12710 case BUILT_IN_FFSL:
12711 case BUILT_IN_FFSLL:
12712 case BUILT_IN_IMAXABS:
12713 case BUILT_IN_FINITE:
12714 case BUILT_IN_FINITEF:
12715 case BUILT_IN_FINITEL:
12716 case BUILT_IN_FINITED32:
12717 case BUILT_IN_FINITED64:
12718 case BUILT_IN_FINITED128:
12719 case BUILT_IN_FPCLASSIFY:
12720 case BUILT_IN_ISFINITE:
12721 case BUILT_IN_ISINF_SIGN:
12722 case BUILT_IN_ISINF:
12723 case BUILT_IN_ISINFF:
12724 case BUILT_IN_ISINFL:
12725 case BUILT_IN_ISINFD32:
12726 case BUILT_IN_ISINFD64:
12727 case BUILT_IN_ISINFD128:
12728 case BUILT_IN_ISNAN:
12729 case BUILT_IN_ISNANF:
12730 case BUILT_IN_ISNANL:
12731 case BUILT_IN_ISNAND32:
12732 case BUILT_IN_ISNAND64:
12733 case BUILT_IN_ISNAND128:
12734 case BUILT_IN_ISNORMAL:
12735 case BUILT_IN_ISGREATER:
12736 case BUILT_IN_ISGREATEREQUAL:
12737 case BUILT_IN_ISLESS:
12738 case BUILT_IN_ISLESSEQUAL:
12739 case BUILT_IN_ISLESSGREATER:
12740 case BUILT_IN_ISUNORDERED:
12741 case BUILT_IN_VA_ARG_PACK:
12742 case BUILT_IN_VA_ARG_PACK_LEN:
12743 case BUILT_IN_VA_COPY:
12744 case BUILT_IN_TRAP:
12745 case BUILT_IN_SAVEREGS:
12746 case BUILT_IN_POPCOUNTL:
12747 case BUILT_IN_POPCOUNTLL:
12748 case BUILT_IN_POPCOUNTIMAX:
12749 case BUILT_IN_POPCOUNT:
12750 case BUILT_IN_PARITYL:
12751 case BUILT_IN_PARITYLL:
12752 case BUILT_IN_PARITYIMAX:
12753 case BUILT_IN_PARITY:
12754 case BUILT_IN_LABS:
12755 case BUILT_IN_LLABS:
12756 case BUILT_IN_PREFETCH:
12757 case BUILT_IN_ACC_ON_DEVICE:
12758 return true;
12760 default:
12761 return is_simple_builtin (decl);
12764 return false;