/* Chunk of gcc/builtins.c from the official-gcc.git mirror
   (blob 5b2ebcc11bb764a48c74288c4e656adebb3e59d1); the view ends just
   before the OpenACC acc_on_device / __builtin_longjmp expansion code.  */
1 /* Expand builtin functions.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "calls.h"
30 #include "varasm.h"
31 #include "tree-object-size.h"
32 #include "realmpfr.h"
33 #include "basic-block.h"
34 #include "tree-ssa-alias.h"
35 #include "internal-fn.h"
36 #include "gimple-expr.h"
37 #include "is-a.h"
38 #include "gimple.h"
39 #include "flags.h"
40 #include "regs.h"
41 #include "hard-reg-set.h"
42 #include "except.h"
43 #include "function.h"
44 #include "insn-config.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "recog.h"
49 #include "output.h"
50 #include "typeclass.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "target.h"
54 #include "langhooks.h"
55 #include "tree-ssanames.h"
56 #include "tree-dfa.h"
57 #include "value-prof.h"
58 #include "diagnostic-core.h"
59 #include "builtins.h"
60 #include "ubsan.h"
61 #include "cilk.h"
/* Forward declaration: folder for one-argument MPC (complex arithmetic)
   math builtins; defined later in this file.  */
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

/* Per-target builtin state; with SWITCHABLE_TARGET the active set can be
   swapped at run time, otherwise the default instance is used directly.  */
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Stringize the first DEF_BUILTIN operand (the enum code) so builtins.def
   expands into a parallel table of printable names.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
89 static rtx c_readstr (const char *, enum machine_mode);
90 static int target_char_cast (tree, char *);
91 static rtx get_memory_rtx (tree, tree);
92 static int apply_args_size (void);
93 static int apply_result_size (void);
94 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
95 static rtx result_vector (int, rtx);
96 #endif
97 static void expand_builtin_update_setjmp_buf (rtx);
98 static void expand_builtin_prefetch (tree);
99 static rtx expand_builtin_apply_args (void);
100 static rtx expand_builtin_apply_args_1 (void);
101 static rtx expand_builtin_apply (rtx, rtx, rtx);
102 static void expand_builtin_return (rtx);
103 static enum type_class type_to_class (tree);
104 static rtx expand_builtin_classify_type (tree);
105 static void expand_errno_check (tree, rtx);
106 static rtx expand_builtin_mathfn (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
109 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
110 static rtx expand_builtin_interclass_mathfn (tree, rtx);
111 static rtx expand_builtin_sincos (tree);
112 static rtx expand_builtin_cexpi (tree, rtx);
113 static rtx expand_builtin_int_roundingfn (tree, rtx);
114 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
115 static rtx expand_builtin_next_arg (void);
116 static rtx expand_builtin_va_start (tree);
117 static rtx expand_builtin_va_end (tree);
118 static rtx expand_builtin_va_copy (tree);
119 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strcmp (tree, rtx);
121 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
122 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
123 static rtx expand_builtin_memcpy (tree, rtx);
124 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
126 enum machine_mode, int);
127 static rtx expand_builtin_strcpy (tree, rtx);
128 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
129 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx);
131 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
132 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
133 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
134 static rtx expand_builtin_bzero (tree);
135 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_alloca (tree, bool);
137 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
138 static rtx expand_builtin_frame_address (tree, tree);
139 static tree stabilize_va_list_loc (location_t, tree, int);
140 static rtx expand_builtin_expect (tree, rtx);
141 static tree fold_builtin_constant_p (tree);
142 static tree fold_builtin_classify_type (tree);
143 static tree fold_builtin_strlen (location_t, tree, tree);
144 static tree fold_builtin_inf (location_t, tree, int);
145 static tree fold_builtin_nan (tree, tree, int);
146 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
147 static bool validate_arg (const_tree, enum tree_code code);
148 static bool integer_valued_real_p (tree);
149 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
150 static rtx expand_builtin_fabs (tree, rtx, rtx);
151 static rtx expand_builtin_signbit (tree, rtx);
152 static tree fold_builtin_sqrt (location_t, tree, tree);
153 static tree fold_builtin_cbrt (location_t, tree, tree);
154 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
155 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
156 static tree fold_builtin_cos (location_t, tree, tree, tree);
157 static tree fold_builtin_cosh (location_t, tree, tree, tree);
158 static tree fold_builtin_tan (tree, tree);
159 static tree fold_builtin_trunc (location_t, tree, tree);
160 static tree fold_builtin_floor (location_t, tree, tree);
161 static tree fold_builtin_ceil (location_t, tree, tree);
162 static tree fold_builtin_round (location_t, tree, tree);
163 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
164 static tree fold_builtin_bitop (tree, tree);
165 static tree fold_builtin_strchr (location_t, tree, tree, tree);
166 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
167 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
168 static tree fold_builtin_strcmp (location_t, tree, tree);
169 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
170 static tree fold_builtin_signbit (location_t, tree, tree);
171 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
172 static tree fold_builtin_isascii (location_t, tree);
173 static tree fold_builtin_toascii (location_t, tree);
174 static tree fold_builtin_isdigit (location_t, tree);
175 static tree fold_builtin_fabs (location_t, tree, tree);
176 static tree fold_builtin_abs (location_t, tree, tree);
177 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
178 enum tree_code);
179 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
180 static tree fold_builtin_0 (location_t, tree, bool);
181 static tree fold_builtin_1 (location_t, tree, tree, bool);
182 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
183 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
184 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
185 static tree fold_builtin_varargs (location_t, tree, tree, bool);
187 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
188 static tree fold_builtin_strstr (location_t, tree, tree, tree);
189 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
190 static tree fold_builtin_strncat (location_t, tree, tree, tree);
191 static tree fold_builtin_strspn (location_t, tree, tree);
192 static tree fold_builtin_strcspn (location_t, tree, tree);
194 static rtx expand_builtin_object_size (tree);
195 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
196 enum built_in_function);
197 static void maybe_emit_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
199 static void maybe_emit_free_warning (tree);
200 static tree fold_builtin_object_size (tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
203 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
204 enum built_in_function);
206 static unsigned HOST_WIDE_INT target_newline;
207 unsigned HOST_WIDE_INT target_percent;
208 static unsigned HOST_WIDE_INT target_c;
209 static unsigned HOST_WIDE_INT target_s;
210 static char target_percent_c[3];
211 char target_percent_s[3];
212 static char target_percent_s_newline[4];
213 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
214 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
215 static tree do_mpfr_arg2 (tree, tree, tree,
216 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
217 static tree do_mpfr_arg3 (tree, tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_sincos (tree, tree, tree);
220 static tree do_mpfr_bessel_n (tree, tree, tree,
221 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
222 const REAL_VALUE_TYPE *, bool);
223 static tree do_mpfr_remquo (tree, tree, tree);
224 static tree do_mpfr_lgamma_r (tree, tree, tree);
225 static void expand_builtin_sync_synchronize (void);
227 /* Return true if NAME starts with __builtin_ or __sync_. */
229 static bool
230 is_builtin_name (const char *name)
232 if (strncmp (name, "__builtin_", 10) == 0)
233 return true;
234 if (strncmp (name, "__sync_", 7) == 0)
235 return true;
236 if (strncmp (name, "__atomic_", 9) == 0)
237 return true;
238 if (flag_cilkplus
239 && (!strcmp (name, "__cilkrts_detach")
240 || !strcmp (name, "__cilkrts_pop_frame")))
241 return true;
242 return false;
/* Return true if DECL is a function symbol representing a built-in.
   Both FUNCTION_DECL-ness and the DECL_BUILT_IN flag must hold.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;	/* Conservative default.  */
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    /* Labels carry no usable alignment information.  */
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      /* A BIT_AND_EXPR with a constant mask encodes an alignment
	 guarantee: the low bit of the mask bounds the alignment.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      /* (step & -step) is the largest power of two dividing STEP.  */
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.

   Thin public wrapper: ADDR_P is false because the access itself is
   assumed to take place.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
428 /* Return the alignment in bits of EXP, an object. */
430 unsigned int
431 get_object_alignment (tree exp)
433 unsigned HOST_WIDE_INT bitpos = 0;
434 unsigned int align;
436 get_object_alignment_1 (exp, &align, &bitpos);
438 /* align and bitpos now specify known low bits of the pointer.
439 ptr & (align - 1) == bitpos. */
441 if (bitpos != 0)
442 align = (bitpos & -bitpos);
443 return align;
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    /* Taking an address: defer to the object-alignment machinery.  */
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      /* Use alignment recorded by earlier analyses on the SSA name,
	 if any.  */
      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      /* A constant "pointer" has exactly known low bits.  */
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  /* Anything else: fall back to the weakest possible answer.  */
  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
495 /* Return the alignment in bits of EXP, a pointer valued expression.
496 The alignment returned is, by default, the alignment of the thing that
497 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
499 Otherwise, look at the expression to see if we can do better, i.e., if the
500 expression is actually pointing at an object whose alignment is tighter. */
502 unsigned int
503 get_pointer_alignment (tree exp)
505 unsigned HOST_WIDE_INT bitpos = 0;
506 unsigned int align;
508 get_pointer_alignment_1 (exp, &align, &bitpos);
510 /* align and bitpos now specify known low bits of the pointer.
511 ptr & (align - 1) == bitpos. */
513 if (bitpos != 0)
514 align = (bitpos & -bitpos);
516 return align;
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* A conditional whose arms have equal known lengths has that length,
     provided evaluating the condition has no side effects (or we only
     need the value).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* For (e1, e2) the length is that of e2, under the same side-effect
     proviso.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
626 /* Return a char pointer for a C string if it is a string constant
627 or sum of string constant and integer constant. */
629 const char *
630 c_getstr (tree src)
632 tree offset_node;
634 src = string_constant (src, &offset_node);
635 if (src == 0)
636 return 0;
638 if (offset_node == 0)
639 return TREE_STRING_POINTER (src);
640 else if (!tree_fits_uhwi_p (offset_node)
641 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
642 return 0;
644 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  /* Number of host words needed to hold MODE's precision.  */
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  /* CH doubles as a "still inside the string" flag: once a NUL byte is
     read it stays zero, so the remaining bytes are filled with zeros.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Map host byte index I to the target bit position J, honoring the
	 target's word and byte endianness.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  Returns 1 (failure) when CST is not a suitable integer constant or
   the value does not survive the round trip to a host char.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  /* Truncate to the target's char width.  */
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  /* Truncate again to the host's char width; if the two differ the value
     cannot be represented on the host.  */
  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
715 /* Similar to save_expr, but assumes that arbitrary code is not executed
716 in between the multiple evaluations. In particular, we assume that a
717 non-addressable local variable will not be modified. */
719 static tree
720 builtin_save_expr (tree exp)
722 if (TREE_CODE (exp) == SSA_NAME
723 || (TREE_ADDRESSABLE (exp) == 0
724 && (TREE_CODE (exp) == PARM_DECL
725 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
726 return exp;
728 return save_expr (exp);
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  /* The target supplies the starting frame address directly.  */
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address is stored one word past the frame
     pointer of the frame we reached.  */
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
815 /* Alias set used for setjmp buffer. */
816 static alias_set_type setjmp_alias_set = -1;
818 /* Construct the leading half of a __builtin_setjmp call. Control will
819 return to RECEIVER_LABEL. This is also called directly by the SJLJ
820 exception handling code. */
822 void
823 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
825 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
826 rtx stack_save;
827 rtx mem;
829 if (setjmp_alias_set == -1)
830 setjmp_alias_set = new_alias_set ();
832 buf_addr = convert_memory_address (Pmode, buf_addr);
834 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
836 /* We store the frame pointer and the address of receiver_label in
837 the buffer and use the rest of it for the stack save area, which
838 is machine-dependent. */
840 mem = gen_rtx_MEM (Pmode, buf_addr);
841 set_mem_alias_set (mem, setjmp_alias_set);
842 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
844 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
845 GET_MODE_SIZE (Pmode))),
846 set_mem_alias_set (mem, setjmp_alias_set);
848 emit_move_insn (validize_mem (mem),
849 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
851 stack_save = gen_rtx_MEM (sa_mode,
852 plus_constant (Pmode, buf_addr,
853 2 * GET_MODE_SIZE (Pmode)));
854 set_mem_alias_set (stack_save, setjmp_alias_set);
855 emit_stack_save (SAVE_NONLOCAL, &stack_save);
857 /* If there is further processing to do, do it. */
858 #ifdef HAVE_builtin_setjmp_setup
859 if (HAVE_builtin_setjmp_setup)
860 emit_insn (gen_builtin_setjmp_setup (buf_addr));
861 #endif
863 /* We have a nonlocal label. */
864 cfun->has_nonlocal_label = 1;
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Prefer the target's dedicated receiver patterns when available; the
     dangling `else`s deliberately chain the two #ifdef'd alternatives.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
959 /* __builtin_longjmp is passed a pointer to an array of five words (not
960 all will be used on all machines). It operates similarly to the C
961 library function of the same name, but is more efficient. Much of
962 the code below is copied from the handling of non-local gotos. */
964 static void
965 expand_builtin_longjmp (rtx buf_addr, rtx value)
967 rtx fp, lab, stack;
968 rtx_insn *insn, *last;
969 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
971 /* DRAP is needed for stack realign if longjmp is expanded to current
972 function */
973 if (SUPPORTS_STACK_ALIGNMENT)
974 crtl->need_drap = true;
976 if (setjmp_alias_set == -1)
977 setjmp_alias_set = new_alias_set ();
979 buf_addr = convert_memory_address (Pmode, buf_addr);
981 buf_addr = force_reg (Pmode, buf_addr);
983 /* We require that the user must pass a second argument of 1, because
984 that is what builtin_setjmp will return. */
985 gcc_assert (value == const1_rtx);
987 last = get_last_insn ();
988 #ifdef HAVE_builtin_longjmp
989 if (HAVE_builtin_longjmp)
990 emit_insn (gen_builtin_longjmp (buf_addr));
991 else
992 #endif
993 /* No target longjmp pattern: open-code the restore of FP, the
   destination label and SP from the setjmp buffer, then jump. */
994 fp = gen_rtx_MEM (Pmode, buf_addr);
995 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
996 GET_MODE_SIZE (Pmode)));
998 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
999 2 * GET_MODE_SIZE (Pmode)));
1000 set_mem_alias_set (fp, setjmp_alias_set);
1001 set_mem_alias_set (lab, setjmp_alias_set);
1002 set_mem_alias_set (stack, setjmp_alias_set);
1004 /* Pick up FP, label, and SP from the block and jump. This code is
1005 from expand_goto in stmt.c; see there for detailed comments. */
1006 #ifdef HAVE_nonlocal_goto
1007 if (HAVE_nonlocal_goto)
1008 /* We have to pass a value to the nonlocal_goto pattern that will
1009 get copied into the static_chain pointer, but it does not matter
1010 what that value is, because builtin_setjmp does not use it. */
1011 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1012 else
1013 #endif
1015 lab = copy_to_reg (lab);
1017 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1018 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1020 emit_move_insn (hard_frame_pointer_rtx, fp);
1021 emit_stack_restore (SAVE_NONLOCAL, stack);
1023 emit_use (hard_frame_pointer_rtx);
1024 emit_use (stack_pointer_rtx);
1025 emit_indirect_jump (lab);
1029 /* Search backwards and mark the jump insn as a non-local goto.
1030 Note that this precludes the use of __builtin_longjmp to a
1031 __builtin_setjmp target in the same function. However, we've
1032 already cautioned the user that these functions are for
1033 internal exception handling use only. */
1034 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1036 gcc_assert (insn != last);
1038 if (JUMP_P (insn))
1040 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1041 break;
1043 else if (CALL_P (insn))
1044 break;
1048 static inline bool
1049 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1051 return (iter->i < iter->n);
1054 /* This function validates the types of a function call argument list
1055 against a specified list of tree_codes. If the last specifier is a 0,
1056 that represents an ellipses, otherwise the last specifier must be a
1057 VOID_TYPE. */
1059 static bool
1060 validate_arglist (const_tree callexpr, ...)
1062 enum tree_code code;
1063 bool res = 0;
1064 va_list ap;
1065 const_call_expr_arg_iterator iter;
1066 const_tree arg;
1068 va_start (ap, callexpr);
1069 init_const_call_expr_arg_iterator (callexpr, &iter);
1073 code = (enum tree_code) va_arg (ap, int);
1074 switch (code)
1076 case 0:
1077 /* This signifies an ellipses, any further arguments are all ok. */
1078 res = true;
1079 goto end;
1080 case VOID_TYPE:
1081 /* This signifies an endlink, if no arguments remain, return
1082 true, otherwise return false. */
1083 res = !more_const_call_expr_args_p (&iter);
1084 goto end;
1085 default:
1086 /* If no parameters remain or the parameter's code does not
1087 match the specified code, return false. Otherwise continue
1088 checking any remaining arguments. */
1089 arg = next_const_call_expr_arg (&iter);
1090 if (!validate_arg (arg, code))
1091 goto end;
1092 break;
1095 while (1);
1097 /* We need gotos here since we can only have one VA_CLOSE in a
1098 function. */
1099 end: ;
1100 va_end (ap);
1102 return res;
1105 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1106 and the address of the save area. */
1108 static rtx
1109 expand_builtin_nonlocal_goto (tree exp)
1111 tree t_label, t_save_area;
1112 rtx r_label, r_save_area, r_fp, r_sp;
1113 rtx_insn *insn;
1115 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1116 return NULL_RTX;
1118 t_label = CALL_EXPR_ARG (exp, 0);
1119 t_save_area = CALL_EXPR_ARG (exp, 1);
1121 r_label = expand_normal (t_label);
1122 r_label = convert_memory_address (Pmode, r_label);
1123 r_save_area = expand_normal (t_save_area);
1124 r_save_area = convert_memory_address (Pmode, r_save_area);
1125 /* Copy the address of the save location to a register just in case it was
1126 based on the frame pointer. */
1127 r_save_area = copy_to_reg (r_save_area);
1128 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1129 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1130 plus_constant (Pmode, r_save_area,
1131 GET_MODE_SIZE (Pmode)));
1133 crtl->has_nonlocal_goto = 1;
1135 #ifdef HAVE_nonlocal_goto
1136 /* ??? We no longer need to pass the static chain value, afaik. */
1137 if (HAVE_nonlocal_goto)
1138 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1139 else
1140 #endif
1141 /* No target pattern: open-code the restore of FP and SP from the
   save area, then jump indirectly to the target label. */
1142 r_label = copy_to_reg (r_label);
1144 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1145 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1147 /* Restore frame pointer for containing function. */
1148 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1149 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1151 /* USE of hard_frame_pointer_rtx added for consistency;
1152 not clear if really needed. */
1153 emit_use (hard_frame_pointer_rtx);
1154 emit_use (stack_pointer_rtx);
1156 /* If the architecture is using a GP register, we must
1157 conservatively assume that the target function makes use of it.
1158 The prologue of functions with nonlocal gotos must therefore
1159 initialize the GP register to the appropriate value, and we
1160 must then make sure that this value is live at the point
1161 of the jump. (Note that this doesn't necessarily apply
1162 to targets with a nonlocal_goto pattern; they are free
1163 to implement it in their own way. Note also that this is
1164 a no-op if the GP register is a global invariant.) */
1165 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1166 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1167 emit_use (pic_offset_table_rtx);
1169 emit_indirect_jump (r_label);
1172 /* Search backwards to the jump insn and mark it as a
1173 non-local goto. */
1174 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1176 if (JUMP_P (insn))
1178 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1179 break;
1181 else if (CALL_P (insn))
1182 break;
1184 /* The builtin itself produces no useful value; a constant stands in
   for the call's result. */
1185 return const0_rtx;
1188 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1189 (not all will be used on all machines) that was passed to __builtin_setjmp.
1190 It updates the stack pointer in that block to correspond to the current
1191 stack pointer. */
1193 static void
1194 expand_builtin_update_setjmp_buf (rtx buf_addr)
1196 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1197 rtx stack_save
1198 = gen_rtx_MEM (sa_mode,
1199 memory_address
1200 (sa_mode,
1201 plus_constant (Pmode, buf_addr,
1202 2 * GET_MODE_SIZE (Pmode))));
1204 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1207 /* Expand a call to __builtin_prefetch. For a target that does not support
1208 data prefetch, evaluate the memory address argument in case it has side
1209 effects. */
1211 static void
1212 expand_builtin_prefetch (tree exp)
1214 tree arg0, arg1, arg2;
1215 int nargs;
1216 rtx op0, op1, op2;
1218 /* The trailing 0 in the specifier list is an ellipsis: arguments
   after the pointer are optional. */
1218 if (!validate_arglist (exp, POINTER_TYPE, 0))
1219 return;
1221 arg0 = CALL_EXPR_ARG (exp, 0);
1223 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1224 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1225 locality). */
1226 nargs = call_expr_nargs (exp);
1227 if (nargs > 1)
1228 arg1 = CALL_EXPR_ARG (exp, 1);
1229 else
1230 arg1 = integer_zero_node;
1231 if (nargs > 2)
1232 arg2 = CALL_EXPR_ARG (exp, 2);
1233 else
1234 arg2 = integer_three_node;
1236 /* Argument 0 is an address. */
1237 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1239 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1240 if (TREE_CODE (arg1) != INTEGER_CST)
1242 error ("second argument to %<__builtin_prefetch%> must be a constant");
1243 arg1 = integer_zero_node;
1245 op1 = expand_normal (arg1);
1246 /* Argument 1 must be either zero or one. */
1247 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1249 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1250 " using zero");
1251 op1 = const0_rtx;
1254 /* Argument 2 (locality) must be a compile-time constant int. */
1255 if (TREE_CODE (arg2) != INTEGER_CST)
1257 error ("third argument to %<__builtin_prefetch%> must be a constant");
1258 arg2 = integer_zero_node;
1260 op2 = expand_normal (arg2);
1261 /* Argument 2 must be 0, 1, 2, or 3. */
1262 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1264 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1265 op2 = const0_rtx;
1268 #ifdef HAVE_prefetch
1269 if (HAVE_prefetch)
1271 struct expand_operand ops[3];
1273 create_address_operand (&ops[0], op0);
1274 create_integer_operand (&ops[1], INTVAL (op1));
1275 create_integer_operand (&ops[2], INTVAL (op2));
1276 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1277 return;
1279 #endif
1281 /* Don't do anything with direct references to volatile memory, but
1282 generate code to handle other side effects. */
1283 if (!MEM_P (op0) && side_effects_p (op0))
1284 emit_insn (op0);
1287 /* Get a MEM rtx for expression EXP which is the address of an operand
1288 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1289 the maximum length of the block of memory that might be accessed or
1290 NULL if unknown. */
1292 static rtx
1293 get_memory_rtx (tree exp, tree len)
1295 tree orig_exp = exp;
1296 rtx addr, mem;
1298 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1299 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1300 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1301 exp = TREE_OPERAND (exp, 0);
1303 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1304 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1306 /* Get an expression we can use to find the attributes to assign to MEM.
1307 First remove any nops. */
1308 while (CONVERT_EXPR_P (exp)
1309 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1310 exp = TREE_OPERAND (exp, 0);
1312 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1313 (as builtin stringops may alias with anything). */
1313 /* The range [1, LEN] sizes the byte array; LEN may be NULL for an
   unknown upper bound. */
1314 exp = fold_build2 (MEM_REF,
1315 build_array_type (char_type_node,
1316 build_range_type (sizetype,
1317 size_one_node, len)),
1318 exp, build_int_cst (ptr_type_node, 0));
1320 /* If the MEM_REF has no acceptable address, try to get the base object
1321 from the original address we got, and build an all-aliasing
1322 unknown-sized access to that one. */
1323 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1324 set_mem_attributes (mem, exp, 0);
1325 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1326 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1327 0))))
1329 exp = build_fold_addr_expr (exp);
1330 exp = fold_build2 (MEM_REF,
1331 build_array_type (char_type_node,
1332 build_range_type (sizetype,
1333 size_zero_node,
1334 NULL)),
1335 exp, build_int_cst (ptr_type_node, 0));
1336 set_mem_attributes (mem, exp, 0);
1337 /* Alias set 0: the MEM may alias anything. */
1338 set_mem_alias_set (mem, 0);
1339 return mem;
1342 /* Built-in functions to perform an untyped call and return. */
1343 /* Per-target caches of the machine mode used for each hard register
   when saving call arguments / return values; filled in lazily by
   apply_args_size and apply_result_size below. */
1344 #define apply_args_mode \
1345 (this_target_builtins->x_apply_args_mode)
1346 #define apply_result_mode \
1347 (this_target_builtins->x_apply_result_mode)
1349 /* Return the size required for the block returned by __builtin_apply_args,
1350 and initialize apply_args_mode. */
1352 static int
1353 apply_args_size (void)
1355 static int size = -1;
1356 int align;
1357 unsigned int regno;
1358 enum machine_mode mode;
1360 /* The values computed by this function never change. */
1360 /* Result is cached in the function-static SIZE; -1 means "not yet
   computed". */
1361 if (size < 0)
1363 /* The first value is the incoming arg-pointer. */
1364 size = GET_MODE_SIZE (Pmode);
1366 /* The second value is the structure value address unless this is
1367 passed as an "invisible" first argument. */
1368 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1369 size += GET_MODE_SIZE (Pmode);
1371 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1372 if (FUNCTION_ARG_REGNO_P (regno))
1374 mode = targetm.calls.get_raw_arg_mode (regno);
1376 gcc_assert (mode != VOIDmode);
1377 /* Round SIZE up to MODE's alignment before adding the register. */
1378 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1379 if (size % align != 0)
1380 size = CEIL (size, align) * align;
1381 size += GET_MODE_SIZE (mode);
1382 apply_args_mode[regno] = mode;
1384 else
1386 apply_args_mode[regno] = VOIDmode;
1389 return size;
1392 /* Return the size required for the block returned by __builtin_apply,
1393 and initialize apply_result_mode. */
1395 static int
1396 apply_result_size (void)
1398 static int size = -1;
1399 int align, regno;
1400 enum machine_mode mode;
1402 /* The values computed by this function never change. */
1402 /* Result is cached in the function-static SIZE; -1 means "not yet
   computed". */
1403 if (size < 0)
1405 size = 0;
1407 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1408 if (targetm.calls.function_value_regno_p (regno))
1410 mode = targetm.calls.get_raw_result_mode (regno);
1412 gcc_assert (mode != VOIDmode);
1413 /* Round SIZE up to MODE's alignment before adding the register. */
1414 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1415 if (size % align != 0)
1416 size = CEIL (size, align) * align;
1417 size += GET_MODE_SIZE (mode);
1418 apply_result_mode[regno] = mode;
1420 else
1421 apply_result_mode[regno] = VOIDmode;
1423 /* Allow targets that use untyped_call and untyped_return to override
1424 the size so that machine-specific information can be stored here. */
1425 #ifdef APPLY_RESULT_SIZE
1426 size = APPLY_RESULT_SIZE;
1427 #endif
1429 return size;
1432 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1433 /* Create a vector describing the result block RESULT. If SAVEP is true,
1434 the result block is used to save the values; otherwise it is used to
1435 restore the values. */
1437 static rtx
1438 result_vector (int savep, rtx result)
1440 int regno, size, align, nelts;
1441 enum machine_mode mode;
1442 rtx reg, mem;
1443 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1445 size = nelts = 0;
1446 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1447 if ((mode = apply_result_mode[regno]) != VOIDmode)
1449 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1450 if (size % align != 0)
1451 size = CEIL (size, align) * align;
1452 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1453 mem = adjust_address (result, mode, size);
1454 /* Saving: mem <- reg.  Restoring: reg <- mem. */
1454 savevec[nelts++] = (savep
1455 ? gen_rtx_SET (VOIDmode, mem, reg)
1456 : gen_rtx_SET (VOIDmode, reg, mem));
1457 size += GET_MODE_SIZE (mode);
1459 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1461 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1463 /* Save the state required to perform an untyped call with the same
1464 arguments as were passed to the current function. */
1466 static rtx
1467 expand_builtin_apply_args_1 (void)
1469 rtx registers, tem;
1470 int size, align, regno;
1471 enum machine_mode mode;
1472 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1474 /* Create a block where the arg-pointer, structure value address,
1475 and argument registers can be saved. */
1476 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1478 /* Walk past the arg-pointer and structure value address. */
1479 size = GET_MODE_SIZE (Pmode);
1480 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1481 size += GET_MODE_SIZE (Pmode);
1483 /* Save each register used in calling a function to the block. */
1484 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1485 if ((mode = apply_args_mode[regno]) != VOIDmode)
1487 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1488 if (size % align != 0)
1489 size = CEIL (size, align) * align;
1491 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1493 emit_move_insn (adjust_address (registers, mode, size), tem);
1494 size += GET_MODE_SIZE (mode);
1497 /* Save the arg pointer to the block. */
1498 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1499 #ifdef STACK_GROWS_DOWNWARD
1500 /* We need the pointer as the caller actually passed them to us, not
1501 as we might have pretended they were passed. Make sure it's a valid
1502 operand, as emit_move_insn isn't expected to handle a PLUS. */
1503 /* NOTE(review): the left-hand side ("tem") of the assignment below
   appears to have been lost in this copy -- confirm against upstream
   that this reads "tem = force_operand (...)". */
1504 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1505 NULL_RTX);
1506 #endif
1507 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1509 size = GET_MODE_SIZE (Pmode);
1511 /* Save the structure value address unless this is passed as an
1512 "invisible" first argument. */
1513 if (struct_incoming_value)
1515 emit_move_insn (adjust_address (registers, Pmode, size),
1516 copy_to_reg (struct_incoming_value));
1517 size += GET_MODE_SIZE (Pmode);
1520 /* Return the address of the block. */
1521 return copy_addr_to_reg (XEXP (registers, 0));
1524 /* __builtin_apply_args returns block of memory allocated on
1525 the stack into which is stored the arg pointer, structure
1526 value address, static chain, and all the registers that might
1527 possibly be used in performing a function call. The code is
1528 moved to the start of the function so the incoming values are
1529 saved. */
1531 static rtx
1532 expand_builtin_apply_args (void)
1534 /* Don't do __builtin_apply_args more than once in a function.
1535 Save the result of the first call and reuse it. */
1536 if (apply_args_value != 0)
1537 return apply_args_value;
1539 /* When this function is called, it means that registers must be
1540 saved on entry to this function. So we migrate the
1541 call to the first insn of this function. */
1542 rtx temp;
1543 rtx seq;
1545 start_sequence ();
1546 temp = expand_builtin_apply_args_1 ();
1547 seq = get_insns ();
1548 end_sequence ();
1550 apply_args_value = temp;
1552 /* Put the insns after the NOTE that starts the function.
1553 If this is inside a start_sequence, make the outer-level insn
1554 chain current, so the code is placed at the start of the
1555 function. If internal_arg_pointer is a non-virtual pseudo,
1556 it needs to be placed after the function that initializes
1557 that pseudo. */
1558 push_topmost_sequence ();
1559 if (REG_P (crtl->args.internal_arg_pointer)
1560 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1561 emit_insn_before (seq, parm_birth_insn);
1562 else
1563 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1564 pop_topmost_sequence ();
1565 return temp;
1569 /* Perform an untyped call and save the state required to perform an
1570 untyped return of whatever value was returned by the given function. */
1572 static rtx
1573 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1575 int size, align, regno;
1576 enum machine_mode mode;
1577 rtx incoming_args, result, reg, dest, src;
1578 rtx_call_insn *call_insn;
1579 rtx old_stack_level = 0;
1580 rtx call_fusage = 0;
1581 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1583 arguments = convert_memory_address (Pmode, arguments);
1585 /* Create a block where the return registers can be saved. */
1586 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1588 /* Fetch the arg pointer from the ARGUMENTS block. */
1589 incoming_args = gen_reg_rtx (Pmode);
1590 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1591 #ifndef STACK_GROWS_DOWNWARD
1592 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1593 incoming_args, 0, OPTAB_LIB_WIDEN);
1594 #endif
1596 /* Push a new argument block and copy the arguments. Do not allow
1597 the (potential) memcpy call below to interfere with our stack
1598 manipulations. */
1599 do_pending_stack_adjust ();
1600 NO_DEFER_POP;
1602 /* Save the stack with nonlocal if available. */
1603 #ifdef HAVE_save_stack_nonlocal
1604 if (HAVE_save_stack_nonlocal)
1605 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1606 else
1607 #endif
1608 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1610 /* Allocate a block of memory onto the stack and copy the memory
1611 arguments to the outgoing arguments address. We can pass TRUE
1612 as the 4th argument because we just saved the stack pointer
1613 and will restore it right after the call. */
1614 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1616 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1617 may have already set current_function_calls_alloca to true.
1618 current_function_calls_alloca won't be set if argsize is zero,
1619 so we have to guarantee need_drap is true here. */
1620 if (SUPPORTS_STACK_ALIGNMENT)
1621 crtl->need_drap = true;
1623 dest = virtual_outgoing_args_rtx;
1624 #ifndef STACK_GROWS_DOWNWARD
1625 if (CONST_INT_P (argsize))
1626 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1627 else
1628 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1629 #endif
1630 dest = gen_rtx_MEM (BLKmode, dest);
1631 set_mem_align (dest, PARM_BOUNDARY);
1632 src = gen_rtx_MEM (BLKmode, incoming_args);
1633 set_mem_align (src, PARM_BOUNDARY);
1634 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1636 /* Refer to the argument block. */
1636 /* apply_args_size is called for its side effect of initializing
   apply_args_mode; its return value is not needed here. */
1637 apply_args_size ();
1638 arguments = gen_rtx_MEM (BLKmode, arguments);
1639 set_mem_align (arguments, PARM_BOUNDARY);
1641 /* Walk past the arg-pointer and structure value address. */
1642 size = GET_MODE_SIZE (Pmode);
1643 if (struct_value)
1644 size += GET_MODE_SIZE (Pmode);
1646 /* Restore each of the registers previously saved. Make USE insns
1647 for each of these registers for use in making the call. */
1648 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1649 if ((mode = apply_args_mode[regno]) != VOIDmode)
1651 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1652 if (size % align != 0)
1653 size = CEIL (size, align) * align;
1654 reg = gen_rtx_REG (mode, regno);
1655 emit_move_insn (reg, adjust_address (arguments, mode, size));
1656 use_reg (&call_fusage, reg);
1657 size += GET_MODE_SIZE (mode);
1660 /* Restore the structure value address unless this is passed as an
1661 "invisible" first argument. */
1662 size = GET_MODE_SIZE (Pmode);
1663 if (struct_value)
1665 rtx value = gen_reg_rtx (Pmode);
1666 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1667 emit_move_insn (struct_value, value);
1668 if (REG_P (struct_value))
1669 use_reg (&call_fusage, struct_value);
1670 size += GET_MODE_SIZE (Pmode);
1673 /* All arguments and registers used for the call are set up by now! */
1674 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1676 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1677 and we don't want to load it into a register as an optimization,
1678 because prepare_call_address already did it if it should be done. */
1679 if (GET_CODE (function) != SYMBOL_REF)
1680 function = memory_address (FUNCTION_MODE, function);
1682 /* Generate the actual call instruction and save the return value. */
1683 #ifdef HAVE_untyped_call
1684 if (HAVE_untyped_call)
1685 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1686 result, result_vector (1, result)));
1687 else
1688 #endif
1689 #ifdef HAVE_call_value
1690 if (HAVE_call_value)
1692 rtx valreg = 0;
1694 /* Locate the unique return register. It is not possible to
1695 express a call that sets more than one return register using
1696 call_value; use untyped_call for that. In fact, untyped_call
1697 only needs to save the return registers in the given block. */
1698 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1699 if ((mode = apply_result_mode[regno]) != VOIDmode)
1701 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1703 valreg = gen_rtx_REG (mode, regno);
1706 emit_call_insn (GEN_CALL_VALUE (valreg,
1707 gen_rtx_MEM (FUNCTION_MODE, function),
1708 const0_rtx, NULL_RTX, const0_rtx));
1710 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1712 else
1713 #endif
1714 gcc_unreachable ();
1716 /* Find the CALL insn we just emitted, and attach the register usage
1717 information. */
1718 call_insn = last_call_insn ();
1719 add_function_usage_to (call_insn, call_fusage);
1721 /* Restore the stack. */
1722 #ifdef HAVE_save_stack_nonlocal
1723 if (HAVE_save_stack_nonlocal)
1724 emit_stack_restore (SAVE_NONLOCAL, old_stack_level)
1725 else
1726 #endif
1727 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1728 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1730 OK_DEFER_POP;
1732 /* Return the address of the result block. */
1733 result = copy_addr_to_reg (XEXP (result, 0));
1734 return convert_memory_address (ptr_mode, result);
1737 /* Perform an untyped return. */
1739 static void
1740 expand_builtin_return (rtx result)
1742 int size, align, regno;
1743 enum machine_mode mode;
1744 rtx reg;
1745 rtx_insn *call_fusage = 0;
1747 result = convert_memory_address (Pmode, result);
1749 /* apply_result_size is called for its side effect of initializing
   apply_result_mode. */
1749 apply_result_size ();
1750 result = gen_rtx_MEM (BLKmode, result);
1752 #ifdef HAVE_untyped_return
1753 if (HAVE_untyped_return)
1755 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1756 emit_barrier ();
1757 return;
1759 #endif
1761 /* Restore the return value and note that each value is used. */
1762 size = 0;
1763 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1764 if ((mode = apply_result_mode[regno]) != VOIDmode)
1766 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1767 if (size % align != 0)
1768 size = CEIL (size, align) * align;
1769 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1770 emit_move_insn (reg, adjust_address (result, mode, size));
1772 push_to_sequence (call_fusage);
1773 emit_use (reg);
1774 call_fusage = get_insns ();
1775 end_sequence ();
1776 size += GET_MODE_SIZE (mode);
1779 /* Put the USE insns before the return. */
1780 emit_insn (call_fusage);
1782 /* Return whatever values was restored by jumping directly to the end
1783 of the function. */
1784 expand_naked_return ();
1787 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1789 static enum type_class
1790 type_to_class (tree type)
1792 switch (TREE_CODE (type))
1794 case VOID_TYPE: return void_type_class;
1795 case INTEGER_TYPE: return integer_type_class;
1796 case ENUMERAL_TYPE: return enumeral_type_class;
1797 case BOOLEAN_TYPE: return boolean_type_class;
1798 case POINTER_TYPE: return pointer_type_class;
1799 case REFERENCE_TYPE: return reference_type_class;
1800 case OFFSET_TYPE: return offset_type_class;
1801 case REAL_TYPE: return real_type_class;
1802 case COMPLEX_TYPE: return complex_type_class;
1803 case FUNCTION_TYPE: return function_type_class;
1804 case METHOD_TYPE: return method_type_class;
1805 case RECORD_TYPE: return record_type_class;
1806 case UNION_TYPE:
1807 case QUAL_UNION_TYPE: return union_type_class;
1808 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1809 ? string_type_class : array_type_class);
1810 case LANG_TYPE: return lang_type_class;
1811 default: return no_type_class;
1815 /* Expand a call EXP to __builtin_classify_type. */
1817 static rtx
1818 expand_builtin_classify_type (tree exp)
1820 if (call_expr_nargs (exp))
1821 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1822 return GEN_INT (no_type_class);
1825 /* This helper macro, meant to be used in mathfn_built_in below,
1826 determines which among a set of three builtin math functions is
1827 appropriate for a given type mode. The `F' and `L' cases are
1828 automatically generated from the `double' case. */
1828 /* Each expansion provides the case labels for all three precisions and
   records the double/float/long-double codes in FCODE/FCODEF/FCODEL. */
1829 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1830 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1831 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1832 fcodel = BUILT_IN_MATHFN##L ; break;
1833 /* Similar to above, but appends _R after any F/L suffix. */
1833 /* Used for the reentrant variants such as lgamma_r / gamma_r. */
1834 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1835 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1836 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1837 fcodel = BUILT_IN_MATHFN##L_R ; break;
1839 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1840 if available. If IMPLICIT is true use the implicit builtin declaration,
1841 otherwise use the explicit declaration. If we can't do the conversion,
1842 return zero. */
1844 static tree
1845 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1847 enum built_in_function fcode, fcodef, fcodel, fcode2;
1849 switch (fn)
1851 CASE_MATHFN (BUILT_IN_ACOS)
1852 CASE_MATHFN (BUILT_IN_ACOSH)
1853 CASE_MATHFN (BUILT_IN_ASIN)
1854 CASE_MATHFN (BUILT_IN_ASINH)
1855 CASE_MATHFN (BUILT_IN_ATAN)
1856 CASE_MATHFN (BUILT_IN_ATAN2)
1857 CASE_MATHFN (BUILT_IN_ATANH)
1858 CASE_MATHFN (BUILT_IN_CBRT)
1859 CASE_MATHFN (BUILT_IN_CEIL)
1860 CASE_MATHFN (BUILT_IN_CEXPI)
1861 CASE_MATHFN (BUILT_IN_COPYSIGN)
1862 CASE_MATHFN (BUILT_IN_COS)
1863 CASE_MATHFN (BUILT_IN_COSH)
1864 CASE_MATHFN (BUILT_IN_DREM)
1865 CASE_MATHFN (BUILT_IN_ERF)
1866 CASE_MATHFN (BUILT_IN_ERFC)
1867 CASE_MATHFN (BUILT_IN_EXP)
1868 CASE_MATHFN (BUILT_IN_EXP10)
1869 CASE_MATHFN (BUILT_IN_EXP2)
1870 CASE_MATHFN (BUILT_IN_EXPM1)
1871 CASE_MATHFN (BUILT_IN_FABS)
1872 CASE_MATHFN (BUILT_IN_FDIM)
1873 CASE_MATHFN (BUILT_IN_FLOOR)
1874 CASE_MATHFN (BUILT_IN_FMA)
1875 CASE_MATHFN (BUILT_IN_FMAX)
1876 CASE_MATHFN (BUILT_IN_FMIN)
1877 CASE_MATHFN (BUILT_IN_FMOD)
1878 CASE_MATHFN (BUILT_IN_FREXP)
1879 CASE_MATHFN (BUILT_IN_GAMMA)
1880 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1881 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1882 CASE_MATHFN (BUILT_IN_HYPOT)
1883 CASE_MATHFN (BUILT_IN_ILOGB)
1884 CASE_MATHFN (BUILT_IN_ICEIL)
1885 CASE_MATHFN (BUILT_IN_IFLOOR)
1886 CASE_MATHFN (BUILT_IN_INF)
1887 CASE_MATHFN (BUILT_IN_IRINT)
1888 CASE_MATHFN (BUILT_IN_IROUND)
1889 CASE_MATHFN (BUILT_IN_ISINF)
1890 CASE_MATHFN (BUILT_IN_J0)
1891 CASE_MATHFN (BUILT_IN_J1)
1892 CASE_MATHFN (BUILT_IN_JN)
1893 CASE_MATHFN (BUILT_IN_LCEIL)
1894 CASE_MATHFN (BUILT_IN_LDEXP)
1895 CASE_MATHFN (BUILT_IN_LFLOOR)
1896 CASE_MATHFN (BUILT_IN_LGAMMA)
1897 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1898 CASE_MATHFN (BUILT_IN_LLCEIL)
1899 CASE_MATHFN (BUILT_IN_LLFLOOR)
1900 CASE_MATHFN (BUILT_IN_LLRINT)
1901 CASE_MATHFN (BUILT_IN_LLROUND)
1902 CASE_MATHFN (BUILT_IN_LOG)
1903 CASE_MATHFN (BUILT_IN_LOG10)
1904 CASE_MATHFN (BUILT_IN_LOG1P)
1905 CASE_MATHFN (BUILT_IN_LOG2)
1906 CASE_MATHFN (BUILT_IN_LOGB)
1907 CASE_MATHFN (BUILT_IN_LRINT)
1908 CASE_MATHFN (BUILT_IN_LROUND)
1909 CASE_MATHFN (BUILT_IN_MODF)
1910 CASE_MATHFN (BUILT_IN_NAN)
1911 CASE_MATHFN (BUILT_IN_NANS)
1912 CASE_MATHFN (BUILT_IN_NEARBYINT)
1913 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1914 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1915 CASE_MATHFN (BUILT_IN_POW)
1916 CASE_MATHFN (BUILT_IN_POWI)
1917 CASE_MATHFN (BUILT_IN_POW10)
1918 CASE_MATHFN (BUILT_IN_REMAINDER)
1919 CASE_MATHFN (BUILT_IN_REMQUO)
1920 CASE_MATHFN (BUILT_IN_RINT)
1921 CASE_MATHFN (BUILT_IN_ROUND)
1922 CASE_MATHFN (BUILT_IN_SCALB)
1923 CASE_MATHFN (BUILT_IN_SCALBLN)
1924 CASE_MATHFN (BUILT_IN_SCALBN)
1925 CASE_MATHFN (BUILT_IN_SIGNBIT)
1926 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1927 CASE_MATHFN (BUILT_IN_SIN)
1928 CASE_MATHFN (BUILT_IN_SINCOS)
1929 CASE_MATHFN (BUILT_IN_SINH)
1930 CASE_MATHFN (BUILT_IN_SQRT)
1931 CASE_MATHFN (BUILT_IN_TAN)
1932 CASE_MATHFN (BUILT_IN_TANH)
1933 CASE_MATHFN (BUILT_IN_TGAMMA)
1934 CASE_MATHFN (BUILT_IN_TRUNC)
1935 CASE_MATHFN (BUILT_IN_Y0)
1936 CASE_MATHFN (BUILT_IN_Y1)
1937 CASE_MATHFN (BUILT_IN_YN)
1939 default:
1940 return NULL_TREE;
1943 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1944 fcode2 = fcode;
1945 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1946 fcode2 = fcodef;
1947 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1948 fcode2 = fcodel;
1949 else
1950 return NULL_TREE;
1952 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1953 return NULL_TREE;
1955 return builtin_decl_explicit (fcode2);
1958 /* Like mathfn_built_in_1(), but always use the implicit array. */
1960 tree
1961 mathfn_built_in (tree type, enum built_in_function fn)
1963 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1966 /* If errno must be maintained, expand the RTL to check if the result,
1967 TARGET, of a built-in function call, EXP, is NaN, and if so set
1968 errno to EDOM. */
1970 static void
1971 expand_errno_check (tree exp, rtx target)
1973 rtx_code_label *lab = gen_label_rtx ();
1975 /* Test the result; if it is NaN, set errno=EDOM because
1976 the argument was not in the domain. */
1977 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1978 NULL_RTX, NULL_RTX, lab,
1979 /* The jump is very likely. */
1980 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1982 #ifdef TARGET_EDOM
1983 /* If this built-in doesn't throw an exception, set errno directly. */
1984 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1986 #ifdef GEN_ERRNO_RTX
1987 rtx errno_rtx = GEN_ERRNO_RTX;
1988 #else
1989 rtx errno_rtx
1990 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1991 #endif
1992 emit_move_insn (errno_rtx,
1993 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1994 emit_label (lab);
1995 return;
1997 #endif
1999 /* Make sure the library call isn't expanded as a tail call. */
2000 CALL_EXPR_TAILCALL (exp) = 0;
2002 /* We can't set errno=EDOM directly; let the library call do it.
2003 Pop the arguments right away in case the call gets deleted. */
2004 NO_DEFER_POP;
2005 expand_call (exp, target, 0);
2006 OK_DEFER_POP;
2007 emit_label (lab);
2010 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2011 Return NULL_RTX if a normal call should be emitted rather than expanding
2012 the function in-line. EXP is the expression that is a call to the builtin
2013 function; if convenient, the result should be placed in TARGET.
2014 SUBTARGET may be used as the target for computing one of EXP's operands. */
2016 static rtx
2017 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2019 optab builtin_optab;
2020 rtx op0;
2021 rtx_insn *insns;
2022 tree fndecl = get_callee_fndecl (exp);
2023 enum machine_mode mode;
2024 bool errno_set = false;
2025 bool try_widening = false;
2026 tree arg;
2028 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2029 return NULL_RTX;
2031 arg = CALL_EXPR_ARG (exp, 0);
2033 switch (DECL_FUNCTION_CODE (fndecl))
2035 CASE_FLT_FN (BUILT_IN_SQRT):
2036 errno_set = ! tree_expr_nonnegative_p (arg);
2037 try_widening = true;
2038 builtin_optab = sqrt_optab;
2039 break;
2040 CASE_FLT_FN (BUILT_IN_EXP):
2041 errno_set = true; builtin_optab = exp_optab; break;
2042 CASE_FLT_FN (BUILT_IN_EXP10):
2043 CASE_FLT_FN (BUILT_IN_POW10):
2044 errno_set = true; builtin_optab = exp10_optab; break;
2045 CASE_FLT_FN (BUILT_IN_EXP2):
2046 errno_set = true; builtin_optab = exp2_optab; break;
2047 CASE_FLT_FN (BUILT_IN_EXPM1):
2048 errno_set = true; builtin_optab = expm1_optab; break;
2049 CASE_FLT_FN (BUILT_IN_LOGB):
2050 errno_set = true; builtin_optab = logb_optab; break;
2051 CASE_FLT_FN (BUILT_IN_LOG):
2052 errno_set = true; builtin_optab = log_optab; break;
2053 CASE_FLT_FN (BUILT_IN_LOG10):
2054 errno_set = true; builtin_optab = log10_optab; break;
2055 CASE_FLT_FN (BUILT_IN_LOG2):
2056 errno_set = true; builtin_optab = log2_optab; break;
2057 CASE_FLT_FN (BUILT_IN_LOG1P):
2058 errno_set = true; builtin_optab = log1p_optab; break;
2059 CASE_FLT_FN (BUILT_IN_ASIN):
2060 builtin_optab = asin_optab; break;
2061 CASE_FLT_FN (BUILT_IN_ACOS):
2062 builtin_optab = acos_optab; break;
2063 CASE_FLT_FN (BUILT_IN_TAN):
2064 builtin_optab = tan_optab; break;
2065 CASE_FLT_FN (BUILT_IN_ATAN):
2066 builtin_optab = atan_optab; break;
2067 CASE_FLT_FN (BUILT_IN_FLOOR):
2068 builtin_optab = floor_optab; break;
2069 CASE_FLT_FN (BUILT_IN_CEIL):
2070 builtin_optab = ceil_optab; break;
2071 CASE_FLT_FN (BUILT_IN_TRUNC):
2072 builtin_optab = btrunc_optab; break;
2073 CASE_FLT_FN (BUILT_IN_ROUND):
2074 builtin_optab = round_optab; break;
2075 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2076 builtin_optab = nearbyint_optab;
2077 if (flag_trapping_math)
2078 break;
2079 /* Else fallthrough and expand as rint. */
2080 CASE_FLT_FN (BUILT_IN_RINT):
2081 builtin_optab = rint_optab; break;
2082 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2083 builtin_optab = significand_optab; break;
2084 default:
2085 gcc_unreachable ();
2088 /* Make a suitable register to place result in. */
2089 mode = TYPE_MODE (TREE_TYPE (exp));
2091 if (! flag_errno_math || ! HONOR_NANS (mode))
2092 errno_set = false;
2094 /* Before working hard, check whether the instruction is available, but try
2095 to widen the mode for specific operations. */
2096 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2097 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2098 && (!errno_set || !optimize_insn_for_size_p ()))
2100 rtx result = gen_reg_rtx (mode);
2102 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2103 need to expand the argument again. This way, we will not perform
2104 side-effects more the once. */
2105 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2107 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2109 start_sequence ();
2111 /* Compute into RESULT.
2112 Set RESULT to wherever the result comes back. */
2113 result = expand_unop (mode, builtin_optab, op0, result, 0);
2115 if (result != 0)
2117 if (errno_set)
2118 expand_errno_check (exp, result);
2120 /* Output the entire sequence. */
2121 insns = get_insns ();
2122 end_sequence ();
2123 emit_insn (insns);
2124 return result;
2127 /* If we were unable to expand via the builtin, stop the sequence
2128 (without outputting the insns) and call to the library function
2129 with the stabilized argument list. */
2130 end_sequence ();
2133 return expand_call (exp, target, target == const0_rtx);
2136 /* Expand a call to the builtin binary math functions (pow and atan2).
2137 Return NULL_RTX if a normal call should be emitted rather than expanding the
2138 function in-line. EXP is the expression that is a call to the builtin
2139 function; if convenient, the result should be placed in TARGET.
2140 SUBTARGET may be used as the target for computing one of EXP's
2141 operands. */
2143 static rtx
2144 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2146 optab builtin_optab;
2147 rtx op0, op1, result;
2148 rtx_insn *insns;
2149 int op1_type = REAL_TYPE;
2150 tree fndecl = get_callee_fndecl (exp);
2151 tree arg0, arg1;
2152 enum machine_mode mode;
2153 bool errno_set = true;
2155 switch (DECL_FUNCTION_CODE (fndecl))
2157 CASE_FLT_FN (BUILT_IN_SCALBN):
2158 CASE_FLT_FN (BUILT_IN_SCALBLN):
2159 CASE_FLT_FN (BUILT_IN_LDEXP):
2160 op1_type = INTEGER_TYPE;
2161 default:
2162 break;
2165 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2166 return NULL_RTX;
2168 arg0 = CALL_EXPR_ARG (exp, 0);
2169 arg1 = CALL_EXPR_ARG (exp, 1);
2171 switch (DECL_FUNCTION_CODE (fndecl))
2173 CASE_FLT_FN (BUILT_IN_POW):
2174 builtin_optab = pow_optab; break;
2175 CASE_FLT_FN (BUILT_IN_ATAN2):
2176 builtin_optab = atan2_optab; break;
2177 CASE_FLT_FN (BUILT_IN_SCALB):
2178 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2179 return 0;
2180 builtin_optab = scalb_optab; break;
2181 CASE_FLT_FN (BUILT_IN_SCALBN):
2182 CASE_FLT_FN (BUILT_IN_SCALBLN):
2183 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2184 return 0;
2185 /* Fall through... */
2186 CASE_FLT_FN (BUILT_IN_LDEXP):
2187 builtin_optab = ldexp_optab; break;
2188 CASE_FLT_FN (BUILT_IN_FMOD):
2189 builtin_optab = fmod_optab; break;
2190 CASE_FLT_FN (BUILT_IN_REMAINDER):
2191 CASE_FLT_FN (BUILT_IN_DREM):
2192 builtin_optab = remainder_optab; break;
2193 default:
2194 gcc_unreachable ();
2197 /* Make a suitable register to place result in. */
2198 mode = TYPE_MODE (TREE_TYPE (exp));
2200 /* Before working hard, check whether the instruction is available. */
2201 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2202 return NULL_RTX;
2204 result = gen_reg_rtx (mode);
2206 if (! flag_errno_math || ! HONOR_NANS (mode))
2207 errno_set = false;
2209 if (errno_set && optimize_insn_for_size_p ())
2210 return 0;
2212 /* Always stabilize the argument list. */
2213 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2214 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2216 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2217 op1 = expand_normal (arg1);
2219 start_sequence ();
2221 /* Compute into RESULT.
2222 Set RESULT to wherever the result comes back. */
2223 result = expand_binop (mode, builtin_optab, op0, op1,
2224 result, 0, OPTAB_DIRECT);
2226 /* If we were unable to expand via the builtin, stop the sequence
2227 (without outputting the insns) and call to the library function
2228 with the stabilized argument list. */
2229 if (result == 0)
2231 end_sequence ();
2232 return expand_call (exp, target, target == const0_rtx);
2235 if (errno_set)
2236 expand_errno_check (exp, result);
2238 /* Output the entire sequence. */
2239 insns = get_insns ();
2240 end_sequence ();
2241 emit_insn (insns);
2243 return result;
2246 /* Expand a call to the builtin trinary math functions (fma).
2247 Return NULL_RTX if a normal call should be emitted rather than expanding the
2248 function in-line. EXP is the expression that is a call to the builtin
2249 function; if convenient, the result should be placed in TARGET.
2250 SUBTARGET may be used as the target for computing one of EXP's
2251 operands. */
2253 static rtx
2254 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2256 optab builtin_optab;
2257 rtx op0, op1, op2, result;
2258 rtx_insn *insns;
2259 tree fndecl = get_callee_fndecl (exp);
2260 tree arg0, arg1, arg2;
2261 enum machine_mode mode;
2263 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2264 return NULL_RTX;
2266 arg0 = CALL_EXPR_ARG (exp, 0);
2267 arg1 = CALL_EXPR_ARG (exp, 1);
2268 arg2 = CALL_EXPR_ARG (exp, 2);
2270 switch (DECL_FUNCTION_CODE (fndecl))
2272 CASE_FLT_FN (BUILT_IN_FMA):
2273 builtin_optab = fma_optab; break;
2274 default:
2275 gcc_unreachable ();
2278 /* Make a suitable register to place result in. */
2279 mode = TYPE_MODE (TREE_TYPE (exp));
2281 /* Before working hard, check whether the instruction is available. */
2282 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2283 return NULL_RTX;
2285 result = gen_reg_rtx (mode);
2287 /* Always stabilize the argument list. */
2288 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2289 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2290 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2292 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2293 op1 = expand_normal (arg1);
2294 op2 = expand_normal (arg2);
2296 start_sequence ();
2298 /* Compute into RESULT.
2299 Set RESULT to wherever the result comes back. */
2300 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2301 result, 0);
2303 /* If we were unable to expand via the builtin, stop the sequence
2304 (without outputting the insns) and call to the library function
2305 with the stabilized argument list. */
2306 if (result == 0)
2308 end_sequence ();
2309 return expand_call (exp, target, target == const0_rtx);
2312 /* Output the entire sequence. */
2313 insns = get_insns ();
2314 end_sequence ();
2315 emit_insn (insns);
2317 return result;
2320 /* Expand a call to the builtin sin and cos math functions.
2321 Return NULL_RTX if a normal call should be emitted rather than expanding the
2322 function in-line. EXP is the expression that is a call to the builtin
2323 function; if convenient, the result should be placed in TARGET.
2324 SUBTARGET may be used as the target for computing one of EXP's
2325 operands. */
2327 static rtx
2328 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2330 optab builtin_optab;
2331 rtx op0;
2332 rtx_insn *insns;
2333 tree fndecl = get_callee_fndecl (exp);
2334 enum machine_mode mode;
2335 tree arg;
2337 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2338 return NULL_RTX;
2340 arg = CALL_EXPR_ARG (exp, 0);
2342 switch (DECL_FUNCTION_CODE (fndecl))
2344 CASE_FLT_FN (BUILT_IN_SIN):
2345 CASE_FLT_FN (BUILT_IN_COS):
2346 builtin_optab = sincos_optab; break;
2347 default:
2348 gcc_unreachable ();
2351 /* Make a suitable register to place result in. */
2352 mode = TYPE_MODE (TREE_TYPE (exp));
2354 /* Check if sincos insn is available, otherwise fallback
2355 to sin or cos insn. */
2356 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2357 switch (DECL_FUNCTION_CODE (fndecl))
2359 CASE_FLT_FN (BUILT_IN_SIN):
2360 builtin_optab = sin_optab; break;
2361 CASE_FLT_FN (BUILT_IN_COS):
2362 builtin_optab = cos_optab; break;
2363 default:
2364 gcc_unreachable ();
2367 /* Before working hard, check whether the instruction is available. */
2368 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2370 rtx result = gen_reg_rtx (mode);
2372 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2373 need to expand the argument again. This way, we will not perform
2374 side-effects more the once. */
2375 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2377 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2379 start_sequence ();
2381 /* Compute into RESULT.
2382 Set RESULT to wherever the result comes back. */
2383 if (builtin_optab == sincos_optab)
2385 int ok;
2387 switch (DECL_FUNCTION_CODE (fndecl))
2389 CASE_FLT_FN (BUILT_IN_SIN):
2390 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2391 break;
2392 CASE_FLT_FN (BUILT_IN_COS):
2393 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2394 break;
2395 default:
2396 gcc_unreachable ();
2398 gcc_assert (ok);
2400 else
2401 result = expand_unop (mode, builtin_optab, op0, result, 0);
2403 if (result != 0)
2405 /* Output the entire sequence. */
2406 insns = get_insns ();
2407 end_sequence ();
2408 emit_insn (insns);
2409 return result;
2412 /* If we were unable to expand via the builtin, stop the sequence
2413 (without outputting the insns) and call to the library function
2414 with the stabilized argument list. */
2415 end_sequence ();
2418 return expand_call (exp, target, target == const0_rtx);
2421 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2422 return an RTL instruction code that implements the functionality.
2423 If that isn't possible or available return CODE_FOR_nothing. */
2425 static enum insn_code
2426 interclass_mathfn_icode (tree arg, tree fndecl)
2428 bool errno_set = false;
2429 optab builtin_optab = unknown_optab;
2430 enum machine_mode mode;
2432 switch (DECL_FUNCTION_CODE (fndecl))
2434 CASE_FLT_FN (BUILT_IN_ILOGB):
2435 errno_set = true; builtin_optab = ilogb_optab; break;
2436 CASE_FLT_FN (BUILT_IN_ISINF):
2437 builtin_optab = isinf_optab; break;
2438 case BUILT_IN_ISNORMAL:
2439 case BUILT_IN_ISFINITE:
2440 CASE_FLT_FN (BUILT_IN_FINITE):
2441 case BUILT_IN_FINITED32:
2442 case BUILT_IN_FINITED64:
2443 case BUILT_IN_FINITED128:
2444 case BUILT_IN_ISINFD32:
2445 case BUILT_IN_ISINFD64:
2446 case BUILT_IN_ISINFD128:
2447 /* These builtins have no optabs (yet). */
2448 break;
2449 default:
2450 gcc_unreachable ();
2453 /* There's no easy way to detect the case we need to set EDOM. */
2454 if (flag_errno_math && errno_set)
2455 return CODE_FOR_nothing;
2457 /* Optab mode depends on the mode of the input argument. */
2458 mode = TYPE_MODE (TREE_TYPE (arg));
2460 if (builtin_optab)
2461 return optab_handler (builtin_optab, mode);
2462 return CODE_FOR_nothing;
2465 /* Expand a call to one of the builtin math functions that operate on
2466 floating point argument and output an integer result (ilogb, isinf,
2467 isnan, etc).
2468 Return 0 if a normal call should be emitted rather than expanding the
2469 function in-line. EXP is the expression that is a call to the builtin
2470 function; if convenient, the result should be placed in TARGET. */
2472 static rtx
2473 expand_builtin_interclass_mathfn (tree exp, rtx target)
2475 enum insn_code icode = CODE_FOR_nothing;
2476 rtx op0;
2477 tree fndecl = get_callee_fndecl (exp);
2478 enum machine_mode mode;
2479 tree arg;
2481 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2482 return NULL_RTX;
2484 arg = CALL_EXPR_ARG (exp, 0);
2485 icode = interclass_mathfn_icode (arg, fndecl);
2486 mode = TYPE_MODE (TREE_TYPE (arg));
2488 if (icode != CODE_FOR_nothing)
2490 struct expand_operand ops[1];
2491 rtx_insn *last = get_last_insn ();
2492 tree orig_arg = arg;
2494 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2495 need to expand the argument again. This way, we will not perform
2496 side-effects more the once. */
2497 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2499 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2501 if (mode != GET_MODE (op0))
2502 op0 = convert_to_mode (mode, op0, 0);
2504 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2505 if (maybe_legitimize_operands (icode, 0, 1, ops)
2506 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2507 return ops[0].value;
2509 delete_insns_since (last);
2510 CALL_EXPR_ARG (exp, 0) = orig_arg;
2513 return NULL_RTX;
2516 /* Expand a call to the builtin sincos math function.
2517 Return NULL_RTX if a normal call should be emitted rather than expanding the
2518 function in-line. EXP is the expression that is a call to the builtin
2519 function. */
2521 static rtx
2522 expand_builtin_sincos (tree exp)
2524 rtx op0, op1, op2, target1, target2;
2525 enum machine_mode mode;
2526 tree arg, sinp, cosp;
2527 int result;
2528 location_t loc = EXPR_LOCATION (exp);
2529 tree alias_type, alias_off;
2531 if (!validate_arglist (exp, REAL_TYPE,
2532 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2533 return NULL_RTX;
2535 arg = CALL_EXPR_ARG (exp, 0);
2536 sinp = CALL_EXPR_ARG (exp, 1);
2537 cosp = CALL_EXPR_ARG (exp, 2);
2539 /* Make a suitable register to place result in. */
2540 mode = TYPE_MODE (TREE_TYPE (arg));
2542 /* Check if sincos insn is available, otherwise emit the call. */
2543 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2544 return NULL_RTX;
2546 target1 = gen_reg_rtx (mode);
2547 target2 = gen_reg_rtx (mode);
2549 op0 = expand_normal (arg);
2550 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2551 alias_off = build_int_cst (alias_type, 0);
2552 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2553 sinp, alias_off));
2554 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2555 cosp, alias_off));
2557 /* Compute into target1 and target2.
2558 Set TARGET to wherever the result comes back. */
2559 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2560 gcc_assert (result);
2562 /* Move target1 and target2 to the memory locations indicated
2563 by op1 and op2. */
2564 emit_move_insn (op1, target1);
2565 emit_move_insn (op2, target2);
2567 return const0_rtx;
2570 /* Expand a call to the internal cexpi builtin to the sincos math function.
2571 EXP is the expression that is a call to the builtin function; if convenient,
2572 the result should be placed in TARGET. */
2574 static rtx
2575 expand_builtin_cexpi (tree exp, rtx target)
2577 tree fndecl = get_callee_fndecl (exp);
2578 tree arg, type;
2579 enum machine_mode mode;
2580 rtx op0, op1, op2;
2581 location_t loc = EXPR_LOCATION (exp);
2583 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2584 return NULL_RTX;
2586 arg = CALL_EXPR_ARG (exp, 0);
2587 type = TREE_TYPE (arg);
2588 mode = TYPE_MODE (TREE_TYPE (arg));
2590 /* Try expanding via a sincos optab, fall back to emitting a libcall
2591 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2592 is only generated from sincos, cexp or if we have either of them. */
2593 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2595 op1 = gen_reg_rtx (mode);
2596 op2 = gen_reg_rtx (mode);
2598 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2600 /* Compute into op1 and op2. */
2601 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2603 else if (targetm.libc_has_function (function_sincos))
2605 tree call, fn = NULL_TREE;
2606 tree top1, top2;
2607 rtx op1a, op2a;
2609 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2610 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2611 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2612 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2613 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2614 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2615 else
2616 gcc_unreachable ();
2618 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2619 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2620 op1a = copy_addr_to_reg (XEXP (op1, 0));
2621 op2a = copy_addr_to_reg (XEXP (op2, 0));
2622 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2623 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2625 /* Make sure not to fold the sincos call again. */
2626 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2627 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2628 call, 3, arg, top1, top2));
2630 else
2632 tree call, fn = NULL_TREE, narg;
2633 tree ctype = build_complex_type (type);
2635 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2636 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2637 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2638 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2639 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2640 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2641 else
2642 gcc_unreachable ();
2644 /* If we don't have a decl for cexp create one. This is the
2645 friendliest fallback if the user calls __builtin_cexpi
2646 without full target C99 function support. */
2647 if (fn == NULL_TREE)
2649 tree fntype;
2650 const char *name = NULL;
2652 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2653 name = "cexpf";
2654 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2655 name = "cexp";
2656 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2657 name = "cexpl";
2659 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2660 fn = build_fn_decl (name, fntype);
2663 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2664 build_real (type, dconst0), arg);
2666 /* Make sure not to fold the cexp call again. */
2667 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2668 return expand_expr (build_call_nary (ctype, call, 1, narg),
2669 target, VOIDmode, EXPAND_NORMAL);
2672 /* Now build the proper return type. */
2673 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2674 make_tree (TREE_TYPE (arg), op2),
2675 make_tree (TREE_TYPE (arg), op1)),
2676 target, VOIDmode, EXPAND_NORMAL);
2679 /* Conveniently construct a function call expression. FNDECL names the
2680 function to be called, N is the number of arguments, and the "..."
2681 parameters are the argument expressions. Unlike build_call_exr
2682 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2684 static tree
2685 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2687 va_list ap;
2688 tree fntype = TREE_TYPE (fndecl);
2689 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2691 va_start (ap, n);
2692 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2693 va_end (ap);
2694 SET_EXPR_LOCATION (fn, loc);
2695 return fn;
2698 /* Expand a call to one of the builtin rounding functions gcc defines
2699 as an extension (lfloor and lceil). As these are gcc extensions we
2700 do not need to worry about setting errno to EDOM.
2701 If expanding via optab fails, lower expression to (int)(floor(x)).
2702 EXP is the expression that is a call to the builtin function;
2703 if convenient, the result should be placed in TARGET. */
2705 static rtx
2706 expand_builtin_int_roundingfn (tree exp, rtx target)
2708 convert_optab builtin_optab;
2709 rtx op0, tmp;
2710 rtx_insn *insns;
2711 tree fndecl = get_callee_fndecl (exp);
2712 enum built_in_function fallback_fn;
2713 tree fallback_fndecl;
2714 enum machine_mode mode;
2715 tree arg;
2717 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2718 gcc_unreachable ();
2720 arg = CALL_EXPR_ARG (exp, 0);
2722 switch (DECL_FUNCTION_CODE (fndecl))
2724 CASE_FLT_FN (BUILT_IN_ICEIL):
2725 CASE_FLT_FN (BUILT_IN_LCEIL):
2726 CASE_FLT_FN (BUILT_IN_LLCEIL):
2727 builtin_optab = lceil_optab;
2728 fallback_fn = BUILT_IN_CEIL;
2729 break;
2731 CASE_FLT_FN (BUILT_IN_IFLOOR):
2732 CASE_FLT_FN (BUILT_IN_LFLOOR):
2733 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2734 builtin_optab = lfloor_optab;
2735 fallback_fn = BUILT_IN_FLOOR;
2736 break;
2738 default:
2739 gcc_unreachable ();
2742 /* Make a suitable register to place result in. */
2743 mode = TYPE_MODE (TREE_TYPE (exp));
2745 target = gen_reg_rtx (mode);
2747 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2748 need to expand the argument again. This way, we will not perform
2749 side-effects more the once. */
2750 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2752 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2754 start_sequence ();
2756 /* Compute into TARGET. */
2757 if (expand_sfix_optab (target, op0, builtin_optab))
2759 /* Output the entire sequence. */
2760 insns = get_insns ();
2761 end_sequence ();
2762 emit_insn (insns);
2763 return target;
2766 /* If we were unable to expand via the builtin, stop the sequence
2767 (without outputting the insns). */
2768 end_sequence ();
2770 /* Fall back to floating point rounding optab. */
2771 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2773 /* For non-C99 targets we may end up without a fallback fndecl here
2774 if the user called __builtin_lfloor directly. In this case emit
2775 a call to the floor/ceil variants nevertheless. This should result
2776 in the best user experience for not full C99 targets. */
2777 if (fallback_fndecl == NULL_TREE)
2779 tree fntype;
2780 const char *name = NULL;
2782 switch (DECL_FUNCTION_CODE (fndecl))
2784 case BUILT_IN_ICEIL:
2785 case BUILT_IN_LCEIL:
2786 case BUILT_IN_LLCEIL:
2787 name = "ceil";
2788 break;
2789 case BUILT_IN_ICEILF:
2790 case BUILT_IN_LCEILF:
2791 case BUILT_IN_LLCEILF:
2792 name = "ceilf";
2793 break;
2794 case BUILT_IN_ICEILL:
2795 case BUILT_IN_LCEILL:
2796 case BUILT_IN_LLCEILL:
2797 name = "ceill";
2798 break;
2799 case BUILT_IN_IFLOOR:
2800 case BUILT_IN_LFLOOR:
2801 case BUILT_IN_LLFLOOR:
2802 name = "floor";
2803 break;
2804 case BUILT_IN_IFLOORF:
2805 case BUILT_IN_LFLOORF:
2806 case BUILT_IN_LLFLOORF:
2807 name = "floorf";
2808 break;
2809 case BUILT_IN_IFLOORL:
2810 case BUILT_IN_LFLOORL:
2811 case BUILT_IN_LLFLOORL:
2812 name = "floorl";
2813 break;
2814 default:
2815 gcc_unreachable ();
2818 fntype = build_function_type_list (TREE_TYPE (arg),
2819 TREE_TYPE (arg), NULL_TREE);
2820 fallback_fndecl = build_fn_decl (name, fntype);
2823 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2825 tmp = expand_normal (exp);
2826 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2828 /* Truncate the result of floating point optab to integer
2829 via expand_fix (). */
2830 target = gen_reg_rtx (mode);
2831 expand_fix (target, tmp, 0);
2833 return target;
2836 /* Expand a call to one of the builtin math functions doing integer
2837 conversion (lrint).
2838 Return 0 if a normal call should be emitted rather than expanding the
2839 function in-line. EXP is the expression that is a call to the builtin
2840 function; if convenient, the result should be placed in TARGET. */
2842 static rtx
2843 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2845 convert_optab builtin_optab;
2846 rtx op0;
2847 rtx_insn *insns;
2848 tree fndecl = get_callee_fndecl (exp);
2849 tree arg;
2850 enum machine_mode mode;
2851 enum built_in_function fallback_fn = BUILT_IN_NONE;
2853 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2854 gcc_unreachable ();
2856 arg = CALL_EXPR_ARG (exp, 0);
/* Map the builtin's function code to its conversion optab.  The
   int-returning variants (irint/iround) additionally record a
   long-returning builtin to fall back on if expansion fails.  */
2858 switch (DECL_FUNCTION_CODE (fndecl))
2860 CASE_FLT_FN (BUILT_IN_IRINT):
2861 fallback_fn = BUILT_IN_LRINT;
2862 /* FALLTHRU */
2863 CASE_FLT_FN (BUILT_IN_LRINT):
2864 CASE_FLT_FN (BUILT_IN_LLRINT):
2865 builtin_optab = lrint_optab;
2866 break;
2868 CASE_FLT_FN (BUILT_IN_IROUND):
2869 fallback_fn = BUILT_IN_LROUND;
2870 /* FALLTHRU */
2871 CASE_FLT_FN (BUILT_IN_LROUND):
2872 CASE_FLT_FN (BUILT_IN_LLROUND):
2873 builtin_optab = lround_optab;
2874 break;
2876 default:
2877 gcc_unreachable ();
2880 /* There's no easy way to detect the case we need to set EDOM. */
2881 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2882 return NULL_RTX;
2884 /* Make a suitable register to place result in. */
2885 mode = TYPE_MODE (TREE_TYPE (exp));
2887 /* There's no easy way to detect the case we need to set EDOM. */
2888 if (!flag_errno_math)
2890 rtx result = gen_reg_rtx (mode);
2892 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2893 need to expand the argument again. This way, we will not perform
2894 side-effects more than once. */
2895 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2897 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
/* Expand into a temporary sequence so the insns can be discarded if
   the optab turns out not to support this mode.  */
2899 start_sequence ();
2901 if (expand_sfix_optab (result, op0, builtin_optab))
2903 /* Output the entire sequence. */
2904 insns = get_insns ();
2905 end_sequence ();
2906 emit_insn (insns);
2907 return result;
2910 /* If we were unable to expand via the builtin, stop the sequence
2911 (without outputting the insns) and call to the library function
2912 with the stabilized argument list. */
2913 end_sequence ();
2916 if (fallback_fn != BUILT_IN_NONE)
2918 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2919 targets, (int) round (x) should never be transformed into
2920 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2921 a call to lround in the hope that the target provides at least some
2922 C99 functions. This should result in the best user experience for
2923 not full C99 targets. */
2924 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2925 fallback_fn, 0);
2927 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2928 fallback_fndecl, 1, arg);
/* Expand the fallback (long-returning) call, then narrow its result
   to the mode of the original builtin's return type.  */
2930 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2931 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2932 return convert_to_mode (mode, target, 0);
/* No fallback available: emit an ordinary call to the builtin itself.  */
2935 return expand_call (exp, target, target == const0_rtx);
2938 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2939 a normal call should be emitted rather than expanding the function
2940 in-line. EXP is the expression that is a call to the builtin
2941 function; if convenient, the result should be placed in TARGET. */
2943 static rtx
2944 expand_builtin_powi (tree exp, rtx target)
2946 tree arg0, arg1;
2947 rtx op0, op1;
2948 enum machine_mode mode;
2949 enum machine_mode mode2;
2951 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2952 return NULL_RTX;
2954 arg0 = CALL_EXPR_ARG (exp, 0);
2955 arg1 = CALL_EXPR_ARG (exp, 1);
2956 mode = TYPE_MODE (TREE_TYPE (exp));
2958 /* Emit a libcall to libgcc. */
2960 /* Mode of the 2nd argument must match that of an int. */
2961 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2963 if (target == NULL_RTX)
2964 target = gen_reg_rtx (mode);
/* Expand both operands and coerce them into the modes the libcall
   expects: the floating mode of the result for OP0, int mode for OP1.  */
2966 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2967 if (GET_MODE (op0) != mode)
2968 op0 = convert_to_mode (mode, op0, 0);
2969 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2970 if (GET_MODE (op1) != mode2)
2971 op1 = convert_to_mode (mode2, op1, 0);
/* powi is always expanded as a call to the libgcc __powi* routine
   registered in powi_optab; there is no insn pattern path here.  */
2973 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2974 target, LCT_CONST, mode, 2,
2975 op0, mode, op1, mode2);
2977 return target;
2980 /* Expand expression EXP which is a call to the strlen builtin. Return
2981 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2982 try to get the result in TARGET, if convenient. */
2984 static rtx
2985 expand_builtin_strlen (tree exp, rtx target,
2986 enum machine_mode target_mode)
2988 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2989 return NULL_RTX;
2990 else
2992 struct expand_operand ops[4];
2993 rtx pat;
2994 tree len;
2995 tree src = CALL_EXPR_ARG (exp, 0);
2996 rtx src_reg;
2997 rtx_insn *before_strlen;
2998 enum machine_mode insn_mode = target_mode;
2999 enum insn_code icode = CODE_FOR_nothing;
3000 unsigned int align;
3002 /* If the length can be computed at compile-time, return it. */
3003 len = c_strlen (src, 0);
3004 if (len)
3005 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3007 /* If the length can be computed at compile-time and is constant
3008 integer, but there are side-effects in src, evaluate
3009 src for side-effects, then return len.
3010 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3011 can be optimized into: i++; x = 3; */
3012 len = c_strlen (src, 1);
3013 if (len && TREE_CODE (len) == INTEGER_CST)
3015 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3016 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3019 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3021 /* If SRC is not a pointer type, don't do this operation inline. */
3022 if (align == 0)
3023 return NULL_RTX;
3025 /* Bail out if we can't compute strlen in the right mode.
   Walk from TARGET_MODE up through wider integer modes looking for
   one the target's strlen pattern supports.  */
3026 while (insn_mode != VOIDmode)
3028 icode = optab_handler (strlen_optab, insn_mode);
3029 if (icode != CODE_FOR_nothing)
3030 break;
3032 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3034 if (insn_mode == VOIDmode)
3035 return NULL_RTX;
3037 /* Make a place to hold the source address. We will not expand
3038 the actual source until we are sure that the expansion will
3039 not fail -- there are trees that cannot be expanded twice. */
3040 src_reg = gen_reg_rtx (Pmode);
3042 /* Mark the beginning of the strlen sequence so we can emit the
3043 source operand later. */
3044 before_strlen = get_last_insn ();
/* Operands of the strlen pattern: result, source memory, a constant 0
   (presumably the searched character -- confirm against the target's
   strlen pattern documentation), and the known source alignment.  */
3046 create_output_operand (&ops[0], target, insn_mode);
3047 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3048 create_integer_operand (&ops[2], 0);
3049 create_integer_operand (&ops[3], align);
3050 if (!maybe_expand_insn (icode, 4, ops))
3051 return NULL_RTX;
3053 /* Now that we are assured of success, expand the source. */
3054 start_sequence ();
3055 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3056 if (pat != src_reg)
3058 #ifdef POINTERS_EXTEND_UNSIGNED
3059 if (GET_MODE (pat) != Pmode)
3060 pat = convert_to_mode (Pmode, pat,
3061 POINTERS_EXTEND_UNSIGNED)
3062 #endif
3063 emit_move_insn (src_reg, pat);
3065 pat = get_insns ();
3066 end_sequence ();
/* Splice the deferred source-address computation in front of the
   strlen insns emitted above.  */
3068 if (before_strlen)
3069 emit_insn_after (pat, before_strlen);
3070 else
3071 emit_insn_before (pat, get_insns ());
3073 /* Return the value in the proper mode for this function. */
3074 if (GET_MODE (ops[0].value) == target_mode)
3075 target = ops[0].value;
3076 else if (target != 0)
3077 convert_move (target, ops[0].value, 0);
3078 else
3079 target = convert_to_mode (target_mode, ops[0].value, 0);
3081 return target;
3085 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3086 bytes from constant string DATA + OFFSET and return it as target
3087 constant. */
3089 static rtx
3090 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3091 enum machine_mode mode)
3093 const char *str = (const char *) data;
/* The read must stay inside the string, including its terminating
   NUL byte; the caller guarantees this.  */
3095 gcc_assert (offset >= 0
3096 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3097 <= strlen (str) + 1));
3099 return c_readstr (str + offset, mode);
3102 /* LEN specifies the length of the block of a memcpy/memset operation.
3103 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3104 In some cases we can make a very likely guess on max size, then we
3105 set it into PROBABLE_MAX_SIZE. */
3107 static void
3108 determine_block_size (tree len, rtx len_rtx,
3109 unsigned HOST_WIDE_INT *min_size,
3110 unsigned HOST_WIDE_INT *max_size,
3111 unsigned HOST_WIDE_INT *probable_max_size)
/* A compile-time constant length gives exact bounds.  */
3113 if (CONST_INT_P (len_rtx))
3115 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3116 return;
3118 else
3120 wide_int min, max;
3121 enum value_range_type range_type = VR_UNDEFINED;
3123 /* Determine bounds from the type. */
3124 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3125 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3126 else
3127 *min_size = 0;
3128 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3129 *probable_max_size = *max_size
3130 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3131 else
3132 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
/* Refine the type-derived bounds with value-range information
   recorded on the SSA name, if LEN is one.  */
3134 if (TREE_CODE (len) == SSA_NAME)
3135 range_type = get_range_info (len, &min, &max);
3136 if (range_type == VR_RANGE)
3138 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3139 *min_size = min.to_uhwi ();
3140 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3141 *probable_max_size = *max_size = max.to_uhwi ();
3143 else if (range_type == VR_ANTI_RANGE)
3145 /* An anti range ~[0, N] lets us raise the minimal size to N+1.  */
3146 if (min == 0)
3148 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3149 *min_size = max.to_uhwi () + 1;
3151 /* Code like
3153 int n;
3154 if (n < 100)
3155 memcpy (a, b, n)
3157 produces an anti range allowing negative values of N. We still
3158 can use the information and make a guess that N is not negative.
   The 1 << 30 threshold below is a heuristic for "too large to be a
   real size" -- presumably distinguishing wrapped-around negative
   values; confirm before relying on it.
3160 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3161 *probable_max_size = min.to_uhwi () - 1;
/* Sanity: MAX_SIZE must fit in the mode the length is expanded in.  */
3164 gcc_checking_assert (*max_size <=
3165 (unsigned HOST_WIDE_INT)
3166 GET_MODE_MASK (GET_MODE (len_rtx)));
3169 /* Expand a call EXP to the memcpy builtin.
3170 Return NULL_RTX if we failed, the caller should emit a normal call,
3171 otherwise try to get the result in TARGET, if convenient (and in
3172 mode MODE if that's convenient). */
3174 static rtx
3175 expand_builtin_memcpy (tree exp, rtx target)
3177 if (!validate_arglist (exp,
3178 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3179 return NULL_RTX;
3180 else
3182 tree dest = CALL_EXPR_ARG (exp, 0);
3183 tree src = CALL_EXPR_ARG (exp, 1);
3184 tree len = CALL_EXPR_ARG (exp, 2);
3185 const char *src_str;
3186 unsigned int src_align = get_pointer_alignment (src);
3187 unsigned int dest_align = get_pointer_alignment (dest);
3188 rtx dest_mem, src_mem, dest_addr, len_rtx;
3189 HOST_WIDE_INT expected_size = -1;
3190 unsigned int expected_align = 0;
3191 unsigned HOST_WIDE_INT min_size;
3192 unsigned HOST_WIDE_INT max_size;
3193 unsigned HOST_WIDE_INT probable_max_size;
3195 /* If DEST is not a pointer type, call the normal function. */
3196 if (dest_align == 0)
3197 return NULL_RTX;
3199 /* If either SRC is not a pointer type, don't do this
3200 operation in-line. */
3201 if (src_align == 0)
3202 return NULL_RTX;
/* Pick up profile-based alignment/size hints for this string op,
   if we are expanding a gimple statement.  */
3204 if (currently_expanding_gimple_stmt)
3205 stringop_block_profile (currently_expanding_gimple_stmt,
3206 &expected_align, &expected_size);
3208 if (expected_align < dest_align)
3209 expected_align = dest_align;
3210 dest_mem = get_memory_rtx (dest, len);
3211 set_mem_align (dest_mem, dest_align);
3212 len_rtx = expand_normal (len);
3213 determine_block_size (len, len_rtx, &min_size, &max_size,
3214 &probable_max_size);
3215 src_str = c_getstr (src);
3217 /* If SRC is a string constant and block move would be done
3218 by pieces, we can avoid loading the string from memory
3219 and only store the computed constants. */
3220 if (src_str
3221 && CONST_INT_P (len_rtx)
3222 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3223 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3224 CONST_CAST (char *, src_str),
3225 dest_align, false))
3227 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3228 builtin_memcpy_read_str,
3229 CONST_CAST (char *, src_str),
3230 dest_align, false, 0);
3231 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3232 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3233 return dest_mem;
3236 src_mem = get_memory_rtx (src, len);
3237 set_mem_align (src_mem, src_align);
3239 /* Copy word part most expediently. */
3240 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3241 CALL_EXPR_TAILCALL (exp)
3242 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3243 expected_align, expected_size,
3244 min_size, max_size, probable_max_size);
/* The block-move expander may not hand back the destination address;
   recompute it from the destination MEM in that case.  */
3246 if (dest_addr == 0)
3248 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3249 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3251 return dest_addr;
3255 /* Expand a call EXP to the mempcpy builtin.
3256 Return NULL_RTX if we failed; the caller should emit a normal call,
3257 otherwise try to get the result in TARGET, if convenient (and in
3258 mode MODE if that's convenient). If ENDP is 0 return the
3259 destination pointer, if ENDP is 1 return the end pointer ala
3260 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3261 stpcpy. */
3263 static rtx
3264 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3266 if (!validate_arglist (exp,
3267 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3268 return NULL_RTX;
3269 else
3271 tree dest = CALL_EXPR_ARG (exp, 0);
3272 tree src = CALL_EXPR_ARG (exp, 1);
3273 tree len = CALL_EXPR_ARG (exp, 2);
/* endp == 1: mempcpy semantics -- return DEST + LEN.  */
3274 return expand_builtin_mempcpy_args (dest, src, len,
3275 target, mode, /*endp=*/ 1);
3279 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3280 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3281 so that this can also be called without constructing an actual CALL_EXPR.
3282 The other arguments and return value are the same as for
3283 expand_builtin_mempcpy. */
3285 static rtx
3286 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3287 rtx target, enum machine_mode mode, int endp)
3289 /* If return value is ignored, transform mempcpy into memcpy. */
3290 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3292 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3293 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3294 dest, src, len);
3295 return expand_expr (result, target, mode, EXPAND_NORMAL);
3297 else
3299 const char *src_str;
3300 unsigned int src_align = get_pointer_alignment (src);
3301 unsigned int dest_align = get_pointer_alignment (dest);
3302 rtx dest_mem, src_mem, len_rtx;
3304 /* If either SRC or DEST is not a pointer type, don't do this
3305 operation in-line. */
3306 if (dest_align == 0 || src_align == 0)
3307 return NULL_RTX;
3309 /* If LEN is not constant, call the normal function. */
3310 if (! tree_fits_uhwi_p (len))
3311 return NULL_RTX;
3313 len_rtx = expand_normal (len);
3314 src_str = c_getstr (src);
3316 /* If SRC is a string constant and block move would be done
3317 by pieces, we can avoid loading the string from memory
3318 and only store the computed constants. */
3319 if (src_str
3320 && CONST_INT_P (len_rtx)
3321 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3322 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3323 CONST_CAST (char *, src_str),
3324 dest_align, false))
3326 dest_mem = get_memory_rtx (dest, len);
3327 set_mem_align (dest_mem, dest_align);
/* ENDP is forwarded to store_by_pieces so the MEM it returns already
   points at the requested result position.  */
3328 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3329 builtin_memcpy_read_str,
3330 CONST_CAST (char *, src_str),
3331 dest_align, false, endp);
3332 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3333 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3334 return dest_mem;
/* Constant length and a copy that is cheap enough by pieces.  */
3337 if (CONST_INT_P (len_rtx)
3338 && can_move_by_pieces (INTVAL (len_rtx),
3339 MIN (dest_align, src_align)))
3341 dest_mem = get_memory_rtx (dest, len);
3342 set_mem_align (dest_mem, dest_align);
3343 src_mem = get_memory_rtx (src, len);
3344 set_mem_align (src_mem, src_align);
3345 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3346 MIN (dest_align, src_align), endp);
3347 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3348 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3349 return dest_mem;
3352 return NULL_RTX;
/* Provide neutral fallbacks so expand_movstr compiles even on targets
   without a movstr insn pattern.  */
3356 #ifndef HAVE_movstr
3357 # define HAVE_movstr 0
3358 # define CODE_FOR_movstr CODE_FOR_nothing
3359 #endif
3361 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3362 we failed, the caller should emit a normal call, otherwise try to
3363 get the result in TARGET, if convenient. If ENDP is 0 return the
3364 destination pointer, if ENDP is 1 return the end pointer ala
3365 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3366 stpcpy. */
3368 static rtx
3369 expand_movstr (tree dest, tree src, rtx target, int endp)
3371 struct expand_operand ops[3];
3372 rtx dest_mem;
3373 rtx src_mem;
3375 if (!HAVE_movstr)
3376 return NULL_RTX;
3378 dest_mem = get_memory_rtx (dest, NULL);
3379 src_mem = get_memory_rtx (src, NULL);
/* When the result is the original destination pointer (endp == 0),
   latch that address in a register before expanding the pattern.  */
3380 if (!endp)
3382 target = force_reg (Pmode, XEXP (dest_mem, 0));
3383 dest_mem = replace_equiv_address (dest_mem, target);
3386 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3387 create_fixed_operand (&ops[1], dest_mem);
3388 create_fixed_operand (&ops[2], src_mem);
3389 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3390 return NULL_RTX;
3392 if (endp && target != const0_rtx)
3394 target = ops[0].value;
3395 /* movstr is supposed to set end to the address of the NUL
3396 terminator. If the caller requested a mempcpy-like return value,
3397 adjust it. */
3398 if (endp == 1)
3400 rtx tem = plus_constant (GET_MODE (target),
3401 gen_lowpart (GET_MODE (target), target), 1);
3402 emit_move_insn (target, force_operand (tem, NULL_RTX));
3405 return target;
3408 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3409 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3410 try to get the result in TARGET, if convenient (and in mode MODE if that's
3411 convenient). */
3413 static rtx
3414 expand_builtin_strcpy (tree exp, rtx target)
/* strcpy takes exactly two pointer arguments; anything else goes
   through a normal library call.  */
3416 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3418 tree dest = CALL_EXPR_ARG (exp, 0);
3419 tree src = CALL_EXPR_ARG (exp, 1);
3420 return expand_builtin_strcpy_args (dest, src, target);
3422 return NULL_RTX;
3425 /* Helper function to do the actual work for expand_builtin_strcpy. The
3426 arguments to the builtin_strcpy call DEST and SRC are broken out
3427 so that this can also be called without constructing an actual CALL_EXPR.
3428 The other arguments and return value are the same as for
3429 expand_builtin_strcpy. */
3431 static rtx
3432 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
/* endp == 0: strcpy semantics -- the result is DEST itself.  */
3434 return expand_movstr (dest, src, target, /*endp=*/0);
3437 /* Expand a call EXP to the stpcpy builtin.
3438 Return NULL_RTX if we failed; the caller should emit a normal call,
3439 otherwise try to get the result in TARGET, if convenient (and in
3440 mode MODE if that's convenient). */
3442 static rtx
3443 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3445 tree dst, src;
3446 location_t loc = EXPR_LOCATION (exp);
3448 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3449 return NULL_RTX;
3451 dst = CALL_EXPR_ARG (exp, 0);
3452 src = CALL_EXPR_ARG (exp, 1);
3454 /* If return value is ignored, transform stpcpy into strcpy. */
3455 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3457 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3458 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3459 return expand_expr (result, target, mode, EXPAND_NORMAL);
3461 else
3463 tree len, lenp1;
3464 rtx ret;
3466 /* Ensure we get an actual string whose length can be evaluated at
3467 compile-time, not an expression containing a string. This is
3468 because the latter will potentially produce pessimized code
3469 when used to produce the return value. */
3470 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3471 return expand_movstr (dst, src, target, /*endp=*/2);
/* Known source length: copy LEN+1 bytes (including the NUL) via
   mempcpy and ask for the pointer to the NUL (endp == 2).  */
3473 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3474 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3475 target, mode, /*endp=*/2);
3477 if (ret)
3478 return ret;
/* mempcpy expansion failed.  For a constant length, fall back to a
   plain strcpy and form the result as DST + LEN by hand.  */
3480 if (TREE_CODE (len) == INTEGER_CST)
3482 rtx len_rtx = expand_normal (len);
3484 if (CONST_INT_P (len_rtx))
3486 ret = expand_builtin_strcpy_args (dst, src, target);
3488 if (ret)
3490 if (! target)
3492 if (mode != VOIDmode)
3493 target = gen_reg_rtx (mode);
3494 else
3495 target = gen_reg_rtx (GET_MODE (ret));
3497 if (GET_MODE (target) != GET_MODE (ret))
3498 ret = gen_lowpart (GET_MODE (target), ret);
3500 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3501 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3502 gcc_assert (ret);
3504 return target;
3509 return expand_movstr (dst, src, target, /*endp=*/2);
3513 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3514 bytes from constant string DATA + OFFSET and return it as target
3515 constant. */
3518 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3519 enum machine_mode mode)
3521 const char *str = (const char *) data;
/* Past the string's NUL terminator strncpy pads with zeros, so any
   chunk entirely beyond the string reads as all-zero.  */
3523 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3524 return const0_rtx;
3526 return c_readstr (str + offset, mode);
3529 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3530 NULL_RTX if we failed; the caller should emit a normal call. */
3532 static rtx
3533 expand_builtin_strncpy (tree exp, rtx target)
3535 location_t loc = EXPR_LOCATION (exp);
3537 if (validate_arglist (exp,
3538 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3540 tree dest = CALL_EXPR_ARG (exp, 0);
3541 tree src = CALL_EXPR_ARG (exp, 1);
3542 tree len = CALL_EXPR_ARG (exp, 2);
3543 tree slen = c_strlen (src, 1);
3545 /* We must be passed a constant len and src parameter. */
3546 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3547 return NULL_RTX;
/* SLEN becomes strlen (SRC) + 1, i.e. the copied size including NUL.  */
3549 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3551 /* We're required to pad with trailing zeros if the requested
3552 len is greater than strlen(s2)+1. In that case try to
3553 use store_by_pieces, if it fails, punt. */
3554 if (tree_int_cst_lt (slen, len))
3556 unsigned int dest_align = get_pointer_alignment (dest);
3557 const char *p = c_getstr (src);
3558 rtx dest_mem;
3560 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3561 || !can_store_by_pieces (tree_to_uhwi (len),
3562 builtin_strncpy_read_str,
3563 CONST_CAST (char *, p),
3564 dest_align, false))
3565 return NULL_RTX;
/* builtin_strncpy_read_str supplies zero chunks past the string end,
   implementing the required zero padding.  */
3567 dest_mem = get_memory_rtx (dest, len);
3568 store_by_pieces (dest_mem, tree_to_uhwi (len),
3569 builtin_strncpy_read_str,
3570 CONST_CAST (char *, p), dest_align, false, 0);
3571 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3572 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3573 return dest_mem;
3576 return NULL_RTX;
3579 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3580 bytes from constant string DATA + OFFSET and return it as target
3581 constant. */
3584 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3585 enum machine_mode mode)
/* Here DATA points at the single fill character; every chunk is that
   byte replicated, so OFFSET is irrelevant.  */
3587 const char *c = (const char *) data;
3588 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3590 memset (p, *c, GET_MODE_SIZE (mode));
3592 return c_readstr (p, mode);
3595 /* Callback routine for store_by_pieces. Return the RTL of a register
3596 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3597 char value given in the RTL register data. For example, if mode is
3598 4 bytes wide, return the RTL for 0x01010101*data. */
3600 static rtx
3601 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3602 enum machine_mode mode)
3604 rtx target, coeff;
3605 size_t size;
3606 char *p;
3608 size = GET_MODE_SIZE (mode);
3609 if (size == 1)
3610 return (rtx) data;
/* Build the 0x0101...01 replication coefficient by reading a buffer
   of 1-bytes in MODE, then multiply the (zero-extended) byte by it.  */
3612 p = XALLOCAVEC (char, size);
3613 memset (p, 1, size);
3614 coeff = c_readstr (p, mode);
3616 target = convert_to_mode (mode, (rtx) data, 1);
3617 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3618 return force_reg (mode, target);
3621 /* Expand expression EXP, which is a call to the memset builtin. Return
3622 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3623 try to get the result in TARGET, if convenient (and in mode MODE if that's
3624 convenient). */
3626 static rtx
3627 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3629 if (!validate_arglist (exp,
3630 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3631 return NULL_RTX;
3632 else
/* Thin wrapper: pull the three arguments out of the CALL_EXPR and
   delegate the real work to expand_builtin_memset_args.  */
3634 tree dest = CALL_EXPR_ARG (exp, 0);
3635 tree val = CALL_EXPR_ARG (exp, 1);
3636 tree len = CALL_EXPR_ARG (exp, 2);
3637 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3641 /* Helper function to do the actual work for expand_builtin_memset. The
3642 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3643 so that this can also be called without constructing an actual CALL_EXPR.
3644 The other arguments and return value are the same as for
3645 expand_builtin_memset. */
3647 static rtx
3648 expand_builtin_memset_args (tree dest, tree val, tree len,
3649 rtx target, enum machine_mode mode, tree orig_exp)
3651 tree fndecl, fn;
3652 enum built_in_function fcode;
3653 enum machine_mode val_mode;
3654 char c;
3655 unsigned int dest_align;
3656 rtx dest_mem, dest_addr, len_rtx;
3657 HOST_WIDE_INT expected_size = -1;
3658 unsigned int expected_align = 0;
3659 unsigned HOST_WIDE_INT min_size;
3660 unsigned HOST_WIDE_INT max_size;
3661 unsigned HOST_WIDE_INT probable_max_size;
3663 dest_align = get_pointer_alignment (dest);
3665 /* If DEST is not a pointer type, don't do this operation in-line. */
3666 if (dest_align == 0)
3667 return NULL_RTX;
3669 if (currently_expanding_gimple_stmt)
3670 stringop_block_profile (currently_expanding_gimple_stmt,
3671 &expected_align, &expected_size);
3673 if (expected_align < dest_align)
3674 expected_align = dest_align;
3676 /* If the LEN parameter is zero, return DEST. */
3677 if (integer_zerop (len))
3679 /* Evaluate and ignore VAL in case it has side-effects. */
3680 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3681 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3684 /* Stabilize the arguments in case we fail. */
3685 dest = builtin_save_expr (dest);
3686 val = builtin_save_expr (val);
3687 len = builtin_save_expr (len);
3689 len_rtx = expand_normal (len);
3690 determine_block_size (len, len_rtx, &min_size, &max_size,
3691 &probable_max_size);
3692 dest_mem = get_memory_rtx (dest, len);
3693 val_mode = TYPE_MODE (unsigned_char_type_node);
/* Non-constant fill value: materialize it in the unsigned-char mode
   and either store by pieces or use the setmem expander.  */
3695 if (TREE_CODE (val) != INTEGER_CST)
3697 rtx val_rtx;
3699 val_rtx = expand_normal (val);
3700 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3702 /* Assume that we can memset by pieces if we can store
3703 * the coefficients by pieces (in the required modes).
3704 * We can't pass builtin_memset_gen_str as that emits RTL. */
3705 c = 1;
3706 if (tree_fits_uhwi_p (len)
3707 && can_store_by_pieces (tree_to_uhwi (len),
3708 builtin_memset_read_str, &c, dest_align,
3709 true))
3711 val_rtx = force_reg (val_mode, val_rtx);
3712 store_by_pieces (dest_mem, tree_to_uhwi (len),
3713 builtin_memset_gen_str, val_rtx, dest_align,
3714 true, 0);
3716 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3717 dest_align, expected_align,
3718 expected_size, min_size, max_size,
3719 probable_max_size))
3720 goto do_libcall;
3722 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3723 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3724 return dest_mem;
/* Constant fill value: reduce it to a single target char C.  */
3727 if (target_char_cast (val, &c))
3728 goto do_libcall;
3730 if (c)
3732 if (tree_fits_uhwi_p (len)
3733 && can_store_by_pieces (tree_to_uhwi (len),
3734 builtin_memset_read_str, &c, dest_align,
3735 true))
3736 store_by_pieces (dest_mem, tree_to_uhwi (len),
3737 builtin_memset_read_str, &c, dest_align, true, 0);
3738 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3739 gen_int_mode (c, val_mode),
3740 dest_align, expected_align,
3741 expected_size, min_size, max_size,
3742 probable_max_size))
3743 goto do_libcall;
3745 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3746 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3747 return dest_mem;
/* Fill value is zero: use the block-clear expander.  */
3750 set_mem_align (dest_mem, dest_align);
3751 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3752 CALL_EXPR_TAILCALL (orig_exp)
3753 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3754 expected_align, expected_size,
3755 min_size, max_size,
3756 probable_max_size);
3758 if (dest_addr == 0)
3760 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3761 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3764 return dest_addr;
/* Inline expansion failed: emit a call to the original builtin,
   which ORIG_EXP says is either memset or bzero.  */
3766 do_libcall:
3767 fndecl = get_callee_fndecl (orig_exp);
3768 fcode = DECL_FUNCTION_CODE (fndecl);
3769 if (fcode == BUILT_IN_MEMSET)
3770 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3771 dest, val, len);
3772 else if (fcode == BUILT_IN_BZERO)
3773 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3774 dest, len);
3775 else
3776 gcc_unreachable ();
3777 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3778 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3779 return expand_call (fn, target, target == const0_rtx);
3782 /* Expand expression EXP, which is a call to the bzero builtin. Return
3783 NULL_RTX if we failed; the caller should emit a normal call. */
3785 static rtx
3786 expand_builtin_bzero (tree exp)
3788 tree dest, size;
3789 location_t loc = EXPR_LOCATION (exp);
3791 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3792 return NULL_RTX;
3794 dest = CALL_EXPR_ARG (exp, 0);
3795 size = CALL_EXPR_ARG (exp, 1);
3797 /* New argument list transforming bzero(ptr x, int y) to
3798 memset(ptr x, int 0, size_t y). This is done this way
3799 so that if it isn't expanded inline, we fallback to
3800 calling bzero instead of memset. */
/* Passing EXP as ORIG_EXP is what preserves the bzero fallback:
   expand_builtin_memset_args re-derives the callee from it.  */
3802 return expand_builtin_memset_args (dest, integer_zero_node,
3803 fold_convert_loc (loc,
3804 size_type_node, size),
3805 const0_rtx, VOIDmode, exp);
3808 /* Expand expression EXP, which is a call to the memcmp built-in function.
3809 Return NULL_RTX if we failed and the caller should emit a normal call,
3810 otherwise try to get the result in TARGET, if convenient (and in mode
3811 MODE, if that's convenient). */
3813 static rtx
3814 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3815 ATTRIBUTE_UNUSED enum machine_mode mode)
3817 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3819 if (!validate_arglist (exp,
3820 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3821 return NULL_RTX;
3823 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3824 implementing memcmp because it will stop if it encounters two
3825 zero bytes. */
3826 #if defined HAVE_cmpmemsi
3828 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3829 rtx result;
3830 rtx insn;
3831 tree arg1 = CALL_EXPR_ARG (exp, 0);
3832 tree arg2 = CALL_EXPR_ARG (exp, 1);
3833 tree len = CALL_EXPR_ARG (exp, 2);
3835 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3836 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3837 enum machine_mode insn_mode;
3839 if (HAVE_cmpmemsi)
3840 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3841 else
3842 return NULL_RTX;
3844 /* If we don't have POINTER_TYPE, call the function. */
3845 if (arg1_align == 0 || arg2_align == 0)
3846 return NULL_RTX;
3848 /* Make a place to write the result of the instruction. */
3849 result = target;
3850 if (! (result != 0
3851 && REG_P (result) && GET_MODE (result) == insn_mode
3852 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3853 result = gen_reg_rtx (insn_mode);
3855 arg1_rtx = get_memory_rtx (arg1, len);
3856 arg2_rtx = get_memory_rtx (arg2, len);
3857 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3859 /* Set MEM_SIZE as appropriate. */
3860 if (CONST_INT_P (arg3_rtx))
3862 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3863 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3866 if (HAVE_cmpmemsi)
3867 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3868 GEN_INT (MIN (arg1_align, arg2_align)));
3869 else
3870 gcc_unreachable ();
/* gen_cmpmemsi can yield NULL -- presumably when the pattern's operand
   predicates reject these operands; fall back to a memcmp libcall then.  */
3872 if (insn)
3873 emit_insn (insn);
3874 else
3875 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3876 TYPE_MODE (integer_type_node), 3,
3877 XEXP (arg1_rtx, 0), Pmode,
3878 XEXP (arg2_rtx, 0), Pmode,
3879 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3880 TYPE_UNSIGNED (sizetype)),
3881 TYPE_MODE (sizetype));
3883 /* Return the value in the proper mode for this function. */
3884 mode = TYPE_MODE (TREE_TYPE (exp));
3885 if (GET_MODE (result) == mode)
3886 return result;
3887 else if (target != 0)
3889 convert_move (target, result, 0);
3890 return target;
3892 else
3893 return convert_to_mode (mode, result, 0);
3895 #endif /* HAVE_cmpmemsi. */
3897 return NULL_RTX;
/* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* Add one for the terminating NUL so the comparison covers
	     the full string contents.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncmp builtin. Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Add one for the terminating NUL.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  /* Cache the result for any later calls in this function.  */
  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4200 /* Expand a call to __builtin_next_arg. */
4202 static rtx
4203 expand_builtin_next_arg (void)
4205 /* Checking arguments is already done in fold_builtin_next_arg
4206 that must be called before this function. */
4207 return expand_binop (ptr_mode, add_optab,
4208 crtl->args.internal_arg_pointer,
4209 crtl->args.arg_offset_rtx,
4210 NULL_RTX, 0, OPTAB_LIB_WIDEN);
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the location to use for the built
   trees; VALIST is the va_list expression; NEEDS_LVALUE is nonzero when
   the caller must be able to write through the returned tree.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  /* Side-effect free and no lvalue required: nothing to do.  */
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Re-materialize the value through the saved address.  */
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4263 /* The "standard" definition of va_list is void*. */
4265 tree
4266 std_build_builtin_va_list (void)
4268 return ptr_type_node;
4271 /* The "standard" abi va_list is va_list_type_node. */
4273 tree
4274 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4276 return va_list_type_node;
/* The "standard" type of va_list is va_list_type_node.  Return
   va_list_type_node if TYPE is (possibly a pointer/reference to, or a
   decayed form of) the canonical va_list type, otherwise NULL_TREE.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  /* Strip one level of indirection from references / double pointers.  */
  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
4314 /* The "standard" implementation of va_start: just assign `nextarg' to
4315 the variable. */
4317 void
4318 std_expand_builtin_va_start (tree valist, rtx nextarg)
4320 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4321 convert_move (va_r, nextarg, 0);
4324 /* Expand EXP, a call to __builtin_va_start. */
4326 static rtx
4327 expand_builtin_va_start (tree exp)
4329 rtx nextarg;
4330 tree valist;
4331 location_t loc = EXPR_LOCATION (exp);
4333 if (call_expr_nargs (exp) < 2)
4335 error_at (loc, "too few arguments to function %<va_start%>");
4336 return const0_rtx;
4339 if (fold_builtin_next_arg (exp, true))
4340 return const0_rtx;
4342 nextarg = expand_builtin_next_arg ();
4343 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4345 if (targetm.expand_builtin_va_start)
4346 targetm.expand_builtin_va_start (valist, nextarg);
4347 else
4348 std_expand_builtin_va_start (valist, nextarg);
4350 return const0_rtx;
4353 /* Expand EXP, a call to __builtin_va_end. */
4355 static rtx
4356 expand_builtin_va_end (tree exp)
4358 tree valist = CALL_EXPR_ARG (exp, 0);
4360 /* Evaluate for side effects, if needed. I hate macros that don't
4361 do that. */
4362 if (TREE_SIDE_EFFECTS (valist))
4363 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4365 return const0_rtx;
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* The destination must be an lvalue; the source need not be.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar (non-array) va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: copy the underlying storage block.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  FNDECL distinguishes which of the two is
   being expanded; EXP is the call expression.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, copy non-register, non-constant
	 values into a register first.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
4467 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4468 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4469 is the same as for allocate_dynamic_stack_space. */
4471 static rtx
4472 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4474 rtx op0;
4475 rtx result;
4476 bool valid_arglist;
4477 unsigned int align;
4478 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4479 == BUILT_IN_ALLOCA_WITH_ALIGN);
4481 valid_arglist
4482 = (alloca_with_align
4483 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4484 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4486 if (!valid_arglist)
4487 return NULL_RTX;
4489 /* Compute the argument. */
4490 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4492 /* Compute the alignment. */
4493 align = (alloca_with_align
4494 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4495 : BIGGEST_ALIGNMENT);
4497 /* Allocate the desired space. */
4498 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4499 result = convert_memory_address (ptr_mode, result);
4501 return result;
4504 /* Expand a call to bswap builtin in EXP.
4505 Return NULL_RTX if a normal call should be emitted rather than expanding the
4506 function in-line. If convenient, the result should be placed in TARGET.
4507 SUBTARGET may be used as the target for computing one of EXP's operands. */
4509 static rtx
4510 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4511 rtx subtarget)
4513 tree arg;
4514 rtx op0;
4516 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4517 return NULL_RTX;
4519 arg = CALL_EXPR_ARG (exp, 0);
4520 op0 = expand_expr (arg,
4521 subtarget && GET_MODE (subtarget) == target_mode
4522 ? subtarget : NULL_RTX,
4523 target_mode, EXPAND_NORMAL);
4524 if (GET_MODE (op0) != target_mode)
4525 op0 = convert_to_mode (target_mode, op0, 1);
4527 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4529 gcc_assert (target);
4531 return convert_to_mode (target_mode, target, 1);
4534 /* Expand a call to a unary builtin in EXP.
4535 Return NULL_RTX if a normal call should be emitted rather than expanding the
4536 function in-line. If convenient, the result should be placed in TARGET.
4537 SUBTARGET may be used as the target for computing one of EXP's operands. */
4539 static rtx
4540 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4541 rtx subtarget, optab op_optab)
4543 rtx op0;
4545 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4546 return NULL_RTX;
4548 /* Compute the argument. */
4549 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4550 (subtarget
4551 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4552 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4553 VOIDmode, EXPAND_NORMAL);
4554 /* Compute op, into TARGET if possible.
4555 Set TARGET to wherever the result comes back. */
4556 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4557 op_optab, op0, target, op_optab != clrsb_optab);
4558 gcc_assert (target);
4560 return convert_to_mode (target_mode, target, 0);
4563 /* Expand a call to __builtin_expect. We just return our argument
4564 as the builtin_expect semantic should've been already executed by
4565 tree branch prediction pass. */
4567 static rtx
4568 expand_builtin_expect (tree exp, rtx target)
4570 tree arg;
4572 if (call_expr_nargs (exp) < 2)
4573 return const0_rtx;
4574 arg = CALL_EXPR_ARG (exp, 0);
4576 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4577 /* When guessing was done, the hints should be already stripped away. */
4578 gcc_assert (!flag_guess_branch_prob
4579 || optimize == 0 || seen_error ());
4580 return target;
4583 /* Expand a call to __builtin_assume_aligned. We just return our first
4584 argument as the builtin_assume_aligned semantic should've been already
4585 executed by CCP. */
4587 static rtx
4588 expand_builtin_assume_aligned (tree exp, rtx target)
4590 if (call_expr_nargs (exp) < 2)
4591 return const0_rtx;
4592 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4593 EXPAND_NORMAL);
4594 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4595 && (call_expr_nargs (exp) < 3
4596 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4597 return target;
/* Expand __builtin_trap: emit the target's trap insn when available,
   otherwise fall back to calling abort through its libfunc.  Control
   never continues past the trap, so a barrier follows.  */

void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    {
      rtx insn = emit_insn (gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  Nothing is generated except
   a barrier stating that control flow never passes this point.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4630 /* Expand EXP, a call to fabs, fabsf or fabsl.
4631 Return NULL_RTX if a normal call should be emitted rather than expanding
4632 the function inline. If convenient, the result should be placed
4633 in TARGET. SUBTARGET may be used as the target for computing
4634 the operand. */
4636 static rtx
4637 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4639 enum machine_mode mode;
4640 tree arg;
4641 rtx op0;
4643 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4644 return NULL_RTX;
4646 arg = CALL_EXPR_ARG (exp, 0);
4647 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4648 mode = TYPE_MODE (TREE_TYPE (arg));
4649 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4650 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4653 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4654 Return NULL is a normal call should be emitted rather than expanding the
4655 function inline. If convenient, the result should be placed in TARGET.
4656 SUBTARGET may be used as the target for computing the operand. */
4658 static rtx
4659 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4661 rtx op0, op1;
4662 tree arg;
4664 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4665 return NULL_RTX;
4667 arg = CALL_EXPR_ARG (exp, 0);
4668 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4670 arg = CALL_EXPR_ARG (exp, 1);
4671 op1 = expand_normal (arg);
4673 return expand_copysign (op0, op1, target);
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
4725 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4727 static rtx
4728 round_trampoline_addr (rtx tramp)
4730 rtx temp, addend, mask;
4732 /* If we don't need too much alignment, we'll have been guaranteed
4733 proper alignment by get_trampoline_type. */
4734 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4735 return tramp;
4737 /* Round address up to desired boundary. */
4738 temp = gen_reg_rtx (Pmode);
4739 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4740 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4742 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4743 temp, 0, OPTAB_LIB_WIDEN);
4744 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4745 temp, 0, OPTAB_LIB_WIDEN);
4747 return tramp;
/* Expand a call to an init_trampoline builtin.  EXP carries the
   trampoline address, the nested function's address and the static
   chain; ONSTACK is true for the stack-allocated variant.  */

static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      /* Record that this function made a trampoline and warn, since
	 trampolines require an executable stack on many targets.  */
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
4807 static rtx
4808 expand_builtin_adjust_trampoline (tree exp)
4810 rtx tramp;
4812 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4813 return NULL_RTX;
4815 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4816 tramp = round_trampoline_addr (tramp);
4817 if (targetm.calls.trampoline_adjust_address)
4818 tramp = targetm.calls.trampoline_adjust_address (tramp);
4820 return tramp;
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The insn pattern refused; roll back anything it emitted.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The whole value fits in one word: view it as an integer.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implement with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
4934 /* Expand fork or exec calls. TARGET is the desired target of the
4935 call. EXP is the call. FN is the
4936 identificator of the actual function. IGNORE is nonzero if the
4937 value is to be ignored. */
4939 static rtx
4940 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4942 tree id, decl;
4943 tree call;
4945 /* If we are not profiling, just call the function. */
4946 if (!profile_arc_flag)
4947 return NULL_RTX;
4949 /* Otherwise call the wrapper. This should be equivalent for the rest of
4950 compiler, so the code does not diverge, and the wrapper may run the
4951 code necessary for keeping the profiling sane. */
4953 switch (DECL_FUNCTION_CODE (fn))
4955 case BUILT_IN_FORK:
4956 id = get_identifier ("__gcov_fork");
4957 break;
4959 case BUILT_IN_EXECL:
4960 id = get_identifier ("__gcov_execl");
4961 break;
4963 case BUILT_IN_EXECV:
4964 id = get_identifier ("__gcov_execv");
4965 break;
4967 case BUILT_IN_EXECLP:
4968 id = get_identifier ("__gcov_execlp");
4969 break;
4971 case BUILT_IN_EXECLE:
4972 id = get_identifier ("__gcov_execle");
4973 break;
4975 case BUILT_IN_EXECVP:
4976 id = get_identifier ("__gcov_execvp");
4977 break;
4979 case BUILT_IN_EXECVE:
4980 id = get_identifier ("__gcov_execve");
4981 break;
4983 default:
4984 gcc_unreachable ();
4987 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4988 FUNCTION_DECL, id, TREE_TYPE (fn));
4989 DECL_EXTERNAL (decl) = 1;
4990 TREE_PUBLIC (decl) = 1;
4991 DECL_ARTIFICIAL (decl) = 1;
4992 TREE_NOTHROW (decl) = 1;
4993 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4994 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4995 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4996 return expand_call (call, target, ignore);
5001 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5002 the pointer in these functions is void*, the tree optimizers may remove
5003 casts. The mode computed in expand_builtin isn't reliable either, due
5004 to __sync_bool_compare_and_swap.
5006 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5007 group of builtins. This gives us log2 of the mode size. */
5009 static inline enum machine_mode
5010 get_builtin_sync_mode (int fcode_diff)
5012 /* The size is not negotiable, so ask not to get BLKmode in return
5013 if the target indicates that a smaller size would be better. */
5014 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5017 /* Expand the memory expression LOC and return the appropriate memory operand
5018 for the builtin_sync operations. */
5020 static rtx
5021 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5023 rtx addr, mem;
5025 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5026 addr = convert_memory_address (Pmode, addr);
5028 /* Note that we explicitly do not want any alias information for this
5029 memory, so that we kill all other live memories. Otherwise we don't
5030 satisfy the full barrier semantics of the intrinsic. */
5031 mem = validize_mem (gen_rtx_MEM (mode, addr));
5033 /* The alignment needs to be at least according to that of the mode. */
5034 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5035 get_pointer_alignment (loc)));
5036 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5037 MEM_VOLATILE_P (mem) = 1;
5039 return mem;
5042 /* Make sure an argument is in the right mode.
5043 EXP is the tree argument.
5044 MODE is the mode it should be in. */
5046 static rtx
5047 expand_expr_force_mode (tree exp, enum machine_mode mode)
5049 rtx val;
5050 enum machine_mode old_mode;
5052 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5053 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5054 of CONST_INTs, where we know the old_mode only from the call argument. */
5056 old_mode = GET_MODE (val);
5057 if (old_mode == VOIDmode)
5058 old_mode = TYPE_MODE (TREE_TYPE (exp));
5059 val = convert_modes (mode, old_mode, val, 1);
5060 return val;
5064 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5065 EXP is the CALL_EXPR. CODE is the rtx code
5066 that corresponds to the arithmetic or logical operation from the name;
5067 an exception here is that NOT actually means NAND. TARGET is an optional
5068 place for us to store the results; AFTER is true if this is the
5069 fetch_and_xxx form. */
5071 static rtx
5072 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5073 enum rtx_code code, bool after,
5074 rtx target)
5076 rtx val, mem;
5077 location_t loc = EXPR_LOCATION (exp);
5079 if (code == NOT && warn_sync_nand)
5081 tree fndecl = get_callee_fndecl (exp);
5082 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5084 static bool warned_f_a_n, warned_n_a_f;
5086 switch (fcode)
5088 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5089 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5090 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5091 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5092 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5093 if (warned_f_a_n)
5094 break;
5096 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5097 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5098 warned_f_a_n = true;
5099 break;
5101 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5102 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5103 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5104 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5105 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5106 if (warned_n_a_f)
5107 break;
5109 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5110 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5111 warned_n_a_f = true;
5112 break;
5114 default:
5115 gcc_unreachable ();
5119 /* Expand the operands. */
5120 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5121 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5123 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5124 after);
5127 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5128 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5129 true if this is the boolean form. TARGET is a place for us to store the
5130 results; this is NOT optional if IS_BOOL is true. */
5132 static rtx
5133 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5134 bool is_bool, rtx target)
5136 rtx old_val, new_val, mem;
5137 rtx *pbool, *poval;
5139 /* Expand the operands. */
5140 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5141 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5142 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5144 pbool = poval = NULL;
5145 if (target != const0_rtx)
5147 if (is_bool)
5148 pbool = &target;
5149 else
5150 poval = &target;
5152 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5153 false, MEMMODEL_SEQ_CST,
5154 MEMMODEL_SEQ_CST))
5155 return NULL_RTX;
5157 return target;
5160 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5161 general form is actually an atomic exchange, and some targets only
5162 support a reduced form with the second argument being a constant 1.
5163 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5164 the results. */
5166 static rtx
5167 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5168 rtx target)
5170 rtx val, mem;
5172 /* Expand the operands. */
5173 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5174 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5176 return expand_sync_lock_test_and_set (target, mem, val);
5179 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5181 static void
5182 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5184 rtx mem;
5186 /* Expand the operands. */
5187 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5189 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5192 /* Given an integer representing an ``enum memmodel'', verify its
5193 correctness and return the memory model enum. */
5195 static enum memmodel
5196 get_memmodel (tree exp)
5198 rtx op;
5199 unsigned HOST_WIDE_INT val;
5201 /* If the parameter is not a constant, it's a run time value so we'll just
5202 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5203 if (TREE_CODE (exp) != INTEGER_CST)
5204 return MEMMODEL_SEQ_CST;
5206 op = expand_normal (exp);
5208 val = INTVAL (op);
5209 if (targetm.memmodel_check)
5210 val = targetm.memmodel_check (val);
5211 else if (val & ~MEMMODEL_MASK)
5213 warning (OPT_Winvalid_memory_model,
5214 "Unknown architecture specifier in memory model to builtin.");
5215 return MEMMODEL_SEQ_CST;
5218 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5220 warning (OPT_Winvalid_memory_model,
5221 "invalid memory model argument to builtin");
5222 return MEMMODEL_SEQ_CST;
5225 return (enum memmodel) val;
5228 /* Expand the __atomic_exchange intrinsic:
5229 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5230 EXP is the CALL_EXPR.
5231 TARGET is an optional place for us to store the results. */
5233 static rtx
5234 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5236 rtx val, mem;
5237 enum memmodel model;
5239 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5240 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5242 error ("invalid memory model for %<__atomic_exchange%>");
5243 return NULL_RTX;
5246 if (!flag_inline_atomics)
5247 return NULL_RTX;
5249 /* Expand the operands. */
5250 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5251 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5253 return expand_atomic_exchange (target, mem, val, model);
5256 /* Expand the __atomic_compare_exchange intrinsic:
5257 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5258 TYPE desired, BOOL weak,
5259 enum memmodel success,
5260 enum memmodel failure)
5261 EXP is the CALL_EXPR.
5262 TARGET is an optional place for us to store the results. */
5264 static rtx
5265 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5266 rtx target)
5268 rtx expect, desired, mem, oldval;
5269 rtx_code_label *label;
5270 enum memmodel success, failure;
5271 tree weak;
5272 bool is_weak;
5274 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5275 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5277 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5278 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5280 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5281 return NULL_RTX;
5284 if (failure > success)
5286 error ("failure memory model cannot be stronger than success "
5287 "memory model for %<__atomic_compare_exchange%>");
5288 return NULL_RTX;
5291 if (!flag_inline_atomics)
5292 return NULL_RTX;
5294 /* Expand the operands. */
5295 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5297 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5298 expect = convert_memory_address (Pmode, expect);
5299 expect = gen_rtx_MEM (mode, expect);
5300 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5302 weak = CALL_EXPR_ARG (exp, 3);
5303 is_weak = false;
5304 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5305 is_weak = true;
5307 if (target == const0_rtx)
5308 target = NULL;
5310 /* Lest the rtl backend create a race condition with an imporoper store
5311 to memory, always create a new pseudo for OLDVAL. */
5312 oldval = NULL;
5314 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5315 is_weak, success, failure))
5316 return NULL_RTX;
5318 /* Conditionally store back to EXPECT, lest we create a race condition
5319 with an improper store to memory. */
5320 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5321 the normal case where EXPECT is totally private, i.e. a register. At
5322 which point the store can be unconditional. */
5323 label = gen_label_rtx ();
5324 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5325 emit_move_insn (expect, oldval);
5326 emit_label (label);
5328 return target;
5331 /* Expand the __atomic_load intrinsic:
5332 TYPE __atomic_load (TYPE *object, enum memmodel)
5333 EXP is the CALL_EXPR.
5334 TARGET is an optional place for us to store the results. */
5336 static rtx
5337 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5339 rtx mem;
5340 enum memmodel model;
5342 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5343 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5344 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5346 error ("invalid memory model for %<__atomic_load%>");
5347 return NULL_RTX;
5350 if (!flag_inline_atomics)
5351 return NULL_RTX;
5353 /* Expand the operand. */
5354 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5356 return expand_atomic_load (target, mem, model);
5360 /* Expand the __atomic_store intrinsic:
5361 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5362 EXP is the CALL_EXPR.
5363 TARGET is an optional place for us to store the results. */
5365 static rtx
5366 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5368 rtx mem, val;
5369 enum memmodel model;
5371 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5372 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5373 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5374 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5376 error ("invalid memory model for %<__atomic_store%>");
5377 return NULL_RTX;
5380 if (!flag_inline_atomics)
5381 return NULL_RTX;
5383 /* Expand the operands. */
5384 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5385 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5387 return expand_atomic_store (mem, val, model, false);
5390 /* Expand the __atomic_fetch_XXX intrinsic:
5391 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5392 EXP is the CALL_EXPR.
5393 TARGET is an optional place for us to store the results.
5394 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5395 FETCH_AFTER is true if returning the result of the operation.
5396 FETCH_AFTER is false if returning the value before the operation.
5397 IGNORE is true if the result is not used.
5398 EXT_CALL is the correct builtin for an external call if this cannot be
5399 resolved to an instruction sequence. */
5401 static rtx
5402 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5403 enum rtx_code code, bool fetch_after,
5404 bool ignore, enum built_in_function ext_call)
5406 rtx val, mem, ret;
5407 enum memmodel model;
5408 tree fndecl;
5409 tree addr;
5411 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5413 /* Expand the operands. */
5414 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5415 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5417 /* Only try generating instructions if inlining is turned on. */
5418 if (flag_inline_atomics)
5420 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5421 if (ret)
5422 return ret;
5425 /* Return if a different routine isn't needed for the library call. */
5426 if (ext_call == BUILT_IN_NONE)
5427 return NULL_RTX;
5429 /* Change the call to the specified function. */
5430 fndecl = get_callee_fndecl (exp);
5431 addr = CALL_EXPR_FN (exp);
5432 STRIP_NOPS (addr);
5434 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5435 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5437 /* Expand the call here so we can emit trailing code. */
5438 ret = expand_call (exp, target, ignore);
5440 /* Replace the original function just in case it matters. */
5441 TREE_OPERAND (addr, 0) = fndecl;
5443 /* Then issue the arithmetic correction to return the right result. */
5444 if (!ignore)
5446 if (code == NOT)
5448 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5449 OPTAB_LIB_WIDEN);
5450 ret = expand_simple_unop (mode, NOT, ret, target, true);
5452 else
5453 ret = expand_simple_binop (mode, code, ret, val, target, true,
5454 OPTAB_LIB_WIDEN);
5456 return ret;
5460 #ifndef HAVE_atomic_clear
5461 # define HAVE_atomic_clear 0
5462 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5463 #endif
5465 /* Expand an atomic clear operation.
5466 void _atomic_clear (BOOL *obj, enum memmodel)
5467 EXP is the call expression. */
5469 static rtx
5470 expand_builtin_atomic_clear (tree exp)
5472 enum machine_mode mode;
5473 rtx mem, ret;
5474 enum memmodel model;
5476 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5477 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5478 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5480 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5481 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5483 error ("invalid memory model for %<__atomic_store%>");
5484 return const0_rtx;
5487 if (HAVE_atomic_clear)
5489 emit_insn (gen_atomic_clear (mem, model));
5490 return const0_rtx;
5493 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5494 Failing that, a store is issued by __atomic_store. The only way this can
5495 fail is if the bool type is larger than a word size. Unlikely, but
5496 handle it anyway for completeness. Assume a single threaded model since
5497 there is no atomic support in this case, and no barriers are required. */
5498 ret = expand_atomic_store (mem, const0_rtx, model, true);
5499 if (!ret)
5500 emit_move_insn (mem, const0_rtx);
5501 return const0_rtx;
5504 /* Expand an atomic test_and_set operation.
5505 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5506 EXP is the call expression. */
5508 static rtx
5509 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5511 rtx mem;
5512 enum memmodel model;
5513 enum machine_mode mode;
5515 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5516 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5517 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5519 return expand_atomic_test_and_set (target, mem, model);
5523 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5524 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5526 static tree
5527 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5529 int size;
5530 enum machine_mode mode;
5531 unsigned int mode_align, type_align;
5533 if (TREE_CODE (arg0) != INTEGER_CST)
5534 return NULL_TREE;
5536 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5537 mode = mode_for_size (size, MODE_INT, 0);
5538 mode_align = GET_MODE_ALIGNMENT (mode);
5540 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5541 type_align = mode_align;
5542 else
5544 tree ttype = TREE_TYPE (arg1);
5546 /* This function is usually invoked and folded immediately by the front
5547 end before anything else has a chance to look at it. The pointer
5548 parameter at this point is usually cast to a void *, so check for that
5549 and look past the cast. */
5550 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5551 && VOID_TYPE_P (TREE_TYPE (ttype)))
5552 arg1 = TREE_OPERAND (arg1, 0);
5554 ttype = TREE_TYPE (arg1);
5555 gcc_assert (POINTER_TYPE_P (ttype));
5557 /* Get the underlying type of the object. */
5558 ttype = TREE_TYPE (ttype);
5559 type_align = TYPE_ALIGN (ttype);
5562 /* If the object has smaller alignment, the the lock free routines cannot
5563 be used. */
5564 if (type_align < mode_align)
5565 return boolean_false_node;
5567 /* Check if a compare_and_swap pattern exists for the mode which represents
5568 the required size. The pattern is not allowed to fail, so the existence
5569 of the pattern indicates support is present. */
5570 if (can_compare_and_swap_p (mode, true))
5571 return boolean_true_node;
5572 else
5573 return boolean_false_node;
5576 /* Return true if the parameters to call EXP represent an object which will
5577 always generate lock free instructions. The first argument represents the
5578 size of the object, and the second parameter is a pointer to the object
5579 itself. If NULL is passed for the object, then the result is based on
5580 typical alignment for an object of the specified size. Otherwise return
5581 false. */
5583 static rtx
5584 expand_builtin_atomic_always_lock_free (tree exp)
5586 tree size;
5587 tree arg0 = CALL_EXPR_ARG (exp, 0);
5588 tree arg1 = CALL_EXPR_ARG (exp, 1);
5590 if (TREE_CODE (arg0) != INTEGER_CST)
5592 error ("non-constant argument 1 to __atomic_always_lock_free");
5593 return const0_rtx;
5596 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5597 if (size == boolean_true_node)
5598 return const1_rtx;
5599 return const0_rtx;
5602 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5603 is lock free on this architecture. */
5605 static tree
5606 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5608 if (!flag_inline_atomics)
5609 return NULL_TREE;
5611 /* If it isn't always lock free, don't generate a result. */
5612 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5613 return boolean_true_node;
5615 return NULL_TREE;
5618 /* Return true if the parameters to call EXP represent an object which will
5619 always generate lock free instructions. The first argument represents the
5620 size of the object, and the second parameter is a pointer to the object
5621 itself. If NULL is passed for the object, then the result is based on
5622 typical alignment for an object of the specified size. Otherwise return
5623 NULL*/
5625 static rtx
5626 expand_builtin_atomic_is_lock_free (tree exp)
5628 tree size;
5629 tree arg0 = CALL_EXPR_ARG (exp, 0);
5630 tree arg1 = CALL_EXPR_ARG (exp, 1);
5632 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5634 error ("non-integer argument 1 to __atomic_is_lock_free");
5635 return NULL_RTX;
5638 if (!flag_inline_atomics)
5639 return NULL_RTX;
5641 /* If the value is known at compile time, return the RTX for it. */
5642 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5643 if (size == boolean_true_node)
5644 return const1_rtx;
5646 return NULL_RTX;
5649 /* Expand the __atomic_thread_fence intrinsic:
5650 void __atomic_thread_fence (enum memmodel)
5651 EXP is the CALL_EXPR. */
5653 static void
5654 expand_builtin_atomic_thread_fence (tree exp)
5656 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5657 expand_mem_thread_fence (model);
5660 /* Expand the __atomic_signal_fence intrinsic:
5661 void __atomic_signal_fence (enum memmodel)
5662 EXP is the CALL_EXPR. */
5664 static void
5665 expand_builtin_atomic_signal_fence (tree exp)
5667 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5668 expand_mem_signal_fence (model);
5671 /* Expand the __sync_synchronize intrinsic. */
5673 static void
5674 expand_builtin_sync_synchronize (void)
5676 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5679 static rtx
5680 expand_builtin_thread_pointer (tree exp, rtx target)
5682 enum insn_code icode;
5683 if (!validate_arglist (exp, VOID_TYPE))
5684 return const0_rtx;
5685 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5686 if (icode != CODE_FOR_nothing)
5688 struct expand_operand op;
5689 /* If the target is not sutitable then create a new target. */
5690 if (target == NULL_RTX
5691 || !REG_P (target)
5692 || GET_MODE (target) != Pmode)
5693 target = gen_reg_rtx (Pmode);
5694 create_output_operand (&op, target, Pmode);
5695 expand_insn (icode, 1, &op);
5696 return target;
5698 error ("__builtin_thread_pointer is not supported on this target");
5699 return const0_rtx;
5702 static void
5703 expand_builtin_set_thread_pointer (tree exp)
5705 enum insn_code icode;
5706 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5707 return;
5708 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5709 if (icode != CODE_FOR_nothing)
5711 struct expand_operand op;
5712 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5713 Pmode, EXPAND_NORMAL);
5714 create_input_operand (&op, val, Pmode);
5715 expand_insn (icode, 1, &op);
5716 return;
5718 error ("__builtin_set_thread_pointer is not supported on this target");
5722 /* Emit code to restore the current value of stack. */
5724 static void
5725 expand_stack_restore (tree var)
5727 rtx_insn *prev;
5728 rtx sa = expand_normal (var);
5730 sa = convert_memory_address (Pmode, sa);
5732 prev = get_last_insn ();
5733 emit_stack_restore (SAVE_BLOCK, sa);
5734 fixup_args_size_notes (prev, get_last_insn (), 0);
5738 /* Emit code to save the current value of stack. */
5740 static rtx
5741 expand_stack_save (void)
5743 rtx ret = NULL_RTX;
5745 do_pending_stack_adjust ();
5746 emit_stack_save (SAVE_BLOCK, &ret);
5747 return ret;
5751 /* Expand OpenACC acc_on_device.
5753 This has to happen late (that is, not in early folding; expand_builtin_*,
5754 rather than fold_builtin_*), as we have to act differently for host and
5755 acceleration device (ACCEL_COMPILER conditional). */
5757 static rtx
5758 expand_builtin_acc_on_device (tree exp, rtx target ATTRIBUTE_UNUSED)
5760 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5761 return NULL_RTX;
5763 tree arg, v1, v2, ret;
5764 location_t loc;
5766 arg = CALL_EXPR_ARG (exp, 0);
5767 arg = builtin_save_expr (arg);
5768 loc = EXPR_LOCATION (exp);
5770 /* Build: (arg == v1 || arg == v2) ? 1 : 0. */
5772 #ifdef ACCEL_COMPILER
5773 v1 = build_int_cst (TREE_TYPE (arg), /* TODO: acc_device_not_host */ 3);
5774 v2 = build_int_cst (TREE_TYPE (arg), ACCEL_COMPILER_acc_device);
5775 #else
5776 v1 = build_int_cst (TREE_TYPE (arg), /* TODO: acc_device_none */ 0);
5777 v2 = build_int_cst (TREE_TYPE (arg), /* TODO: acc_device_host */ 2);
5778 #endif
5780 v1 = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg, v1);
5781 v2 = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg, v2);
5783 /* Can't use TRUTH_ORIF_EXPR, as that is not supported by
5784 expand_expr_real*. */
5785 ret = fold_build3_loc (loc, COND_EXPR, integer_type_node, v1, v1, v2);
5786 ret = fold_build3_loc (loc, COND_EXPR, integer_type_node,
5787 ret, integer_one_node, integer_zero_node);
5789 return expand_normal (ret);
5793 /* Expand an expression EXP that calls a built-in function,
5794 with result going to TARGET if that's convenient
5795 (and in mode MODE if that's convenient).
5796 SUBTARGET may be used as the target for computing one of EXP's operands.
5797 IGNORE is nonzero if the value is to be ignored. */
5800 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5801 int ignore)
5803 tree fndecl = get_callee_fndecl (exp);
5804 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5805 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5806 int flags;
5808 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5809 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5811 /* When not optimizing, generate calls to library functions for a certain
5812 set of builtins. */
5813 if (!optimize
5814 && !called_as_built_in (fndecl)
5815 && fcode != BUILT_IN_FORK
5816 && fcode != BUILT_IN_EXECL
5817 && fcode != BUILT_IN_EXECV
5818 && fcode != BUILT_IN_EXECLP
5819 && fcode != BUILT_IN_EXECLE
5820 && fcode != BUILT_IN_EXECVP
5821 && fcode != BUILT_IN_EXECVE
5822 && fcode != BUILT_IN_ALLOCA
5823 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5824 && fcode != BUILT_IN_FREE)
5825 return expand_call (exp, target, ignore);
5827 /* The built-in function expanders test for target == const0_rtx
5828 to determine whether the function's result will be ignored. */
5829 if (ignore)
5830 target = const0_rtx;
5832 /* If the result of a pure or const built-in function is ignored, and
5833 none of its arguments are volatile, we can avoid expanding the
5834 built-in call and just evaluate the arguments for side-effects. */
5835 if (target == const0_rtx
5836 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5837 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5839 bool volatilep = false;
5840 tree arg;
5841 call_expr_arg_iterator iter;
5843 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5844 if (TREE_THIS_VOLATILE (arg))
5846 volatilep = true;
5847 break;
5850 if (! volatilep)
5852 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5853 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5854 return const0_rtx;
5858 switch (fcode)
5860 CASE_FLT_FN (BUILT_IN_FABS):
5861 case BUILT_IN_FABSD32:
5862 case BUILT_IN_FABSD64:
5863 case BUILT_IN_FABSD128:
5864 target = expand_builtin_fabs (exp, target, subtarget);
5865 if (target)
5866 return target;
5867 break;
5869 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5870 target = expand_builtin_copysign (exp, target, subtarget);
5871 if (target)
5872 return target;
5873 break;
5875 /* Just do a normal library call if we were unable to fold
5876 the values. */
5877 CASE_FLT_FN (BUILT_IN_CABS):
5878 break;
5880 CASE_FLT_FN (BUILT_IN_EXP):
5881 CASE_FLT_FN (BUILT_IN_EXP10):
5882 CASE_FLT_FN (BUILT_IN_POW10):
5883 CASE_FLT_FN (BUILT_IN_EXP2):
5884 CASE_FLT_FN (BUILT_IN_EXPM1):
5885 CASE_FLT_FN (BUILT_IN_LOGB):
5886 CASE_FLT_FN (BUILT_IN_LOG):
5887 CASE_FLT_FN (BUILT_IN_LOG10):
5888 CASE_FLT_FN (BUILT_IN_LOG2):
5889 CASE_FLT_FN (BUILT_IN_LOG1P):
5890 CASE_FLT_FN (BUILT_IN_TAN):
5891 CASE_FLT_FN (BUILT_IN_ASIN):
5892 CASE_FLT_FN (BUILT_IN_ACOS):
5893 CASE_FLT_FN (BUILT_IN_ATAN):
5894 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5895 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5896 because of possible accuracy problems. */
5897 if (! flag_unsafe_math_optimizations)
5898 break;
5899 CASE_FLT_FN (BUILT_IN_SQRT):
5900 CASE_FLT_FN (BUILT_IN_FLOOR):
5901 CASE_FLT_FN (BUILT_IN_CEIL):
5902 CASE_FLT_FN (BUILT_IN_TRUNC):
5903 CASE_FLT_FN (BUILT_IN_ROUND):
5904 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5905 CASE_FLT_FN (BUILT_IN_RINT):
5906 target = expand_builtin_mathfn (exp, target, subtarget);
5907 if (target)
5908 return target;
5909 break;
5911 CASE_FLT_FN (BUILT_IN_FMA):
5912 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5913 if (target)
5914 return target;
5915 break;
5917 CASE_FLT_FN (BUILT_IN_ILOGB):
5918 if (! flag_unsafe_math_optimizations)
5919 break;
5920 CASE_FLT_FN (BUILT_IN_ISINF):
5921 CASE_FLT_FN (BUILT_IN_FINITE):
5922 case BUILT_IN_ISFINITE:
5923 case BUILT_IN_ISNORMAL:
5924 target = expand_builtin_interclass_mathfn (exp, target);
5925 if (target)
5926 return target;
5927 break;
5929 CASE_FLT_FN (BUILT_IN_ICEIL):
5930 CASE_FLT_FN (BUILT_IN_LCEIL):
5931 CASE_FLT_FN (BUILT_IN_LLCEIL):
5932 CASE_FLT_FN (BUILT_IN_LFLOOR):
5933 CASE_FLT_FN (BUILT_IN_IFLOOR):
5934 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5935 target = expand_builtin_int_roundingfn (exp, target);
5936 if (target)
5937 return target;
5938 break;
5940 CASE_FLT_FN (BUILT_IN_IRINT):
5941 CASE_FLT_FN (BUILT_IN_LRINT):
5942 CASE_FLT_FN (BUILT_IN_LLRINT):
5943 CASE_FLT_FN (BUILT_IN_IROUND):
5944 CASE_FLT_FN (BUILT_IN_LROUND):
5945 CASE_FLT_FN (BUILT_IN_LLROUND):
5946 target = expand_builtin_int_roundingfn_2 (exp, target);
5947 if (target)
5948 return target;
5949 break;
5951 CASE_FLT_FN (BUILT_IN_POWI):
5952 target = expand_builtin_powi (exp, target);
5953 if (target)
5954 return target;
5955 break;
5957 CASE_FLT_FN (BUILT_IN_ATAN2):
5958 CASE_FLT_FN (BUILT_IN_LDEXP):
5959 CASE_FLT_FN (BUILT_IN_SCALB):
5960 CASE_FLT_FN (BUILT_IN_SCALBN):
5961 CASE_FLT_FN (BUILT_IN_SCALBLN):
5962 if (! flag_unsafe_math_optimizations)
5963 break;
5965 CASE_FLT_FN (BUILT_IN_FMOD):
5966 CASE_FLT_FN (BUILT_IN_REMAINDER):
5967 CASE_FLT_FN (BUILT_IN_DREM):
5968 CASE_FLT_FN (BUILT_IN_POW):
5969 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5970 if (target)
5971 return target;
5972 break;
5974 CASE_FLT_FN (BUILT_IN_CEXPI):
5975 target = expand_builtin_cexpi (exp, target);
5976 gcc_assert (target);
5977 return target;
5979 CASE_FLT_FN (BUILT_IN_SIN):
5980 CASE_FLT_FN (BUILT_IN_COS):
5981 if (! flag_unsafe_math_optimizations)
5982 break;
5983 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5984 if (target)
5985 return target;
5986 break;
5988 CASE_FLT_FN (BUILT_IN_SINCOS):
5989 if (! flag_unsafe_math_optimizations)
5990 break;
5991 target = expand_builtin_sincos (exp);
5992 if (target)
5993 return target;
5994 break;
5996 case BUILT_IN_APPLY_ARGS:
5997 return expand_builtin_apply_args ();
5999 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6000 FUNCTION with a copy of the parameters described by
6001 ARGUMENTS, and ARGSIZE. It returns a block of memory
6002 allocated on the stack into which is stored all the registers
6003 that might possibly be used for returning the result of a
6004 function. ARGUMENTS is the value returned by
6005 __builtin_apply_args. ARGSIZE is the number of bytes of
6006 arguments that must be copied. ??? How should this value be
6007 computed? We'll also need a safe worst case value for varargs
6008 functions. */
6009 case BUILT_IN_APPLY:
6010 if (!validate_arglist (exp, POINTER_TYPE,
6011 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6012 && !validate_arglist (exp, REFERENCE_TYPE,
6013 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6014 return const0_rtx;
6015 else
6017 rtx ops[3];
6019 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6020 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6021 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6023 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6026 /* __builtin_return (RESULT) causes the function to return the
6027 value described by RESULT. RESULT is address of the block of
6028 memory returned by __builtin_apply. */
6029 case BUILT_IN_RETURN:
6030 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6031 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6032 return const0_rtx;
6034 case BUILT_IN_SAVEREGS:
6035 return expand_builtin_saveregs ();
6037 case BUILT_IN_VA_ARG_PACK:
6038 /* All valid uses of __builtin_va_arg_pack () are removed during
6039 inlining. */
6040 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6041 return const0_rtx;
6043 case BUILT_IN_VA_ARG_PACK_LEN:
6044 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6045 inlining. */
6046 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6047 return const0_rtx;
6049 /* Return the address of the first anonymous stack arg. */
6050 case BUILT_IN_NEXT_ARG:
6051 if (fold_builtin_next_arg (exp, false))
6052 return const0_rtx;
6053 return expand_builtin_next_arg ();
6055 case BUILT_IN_CLEAR_CACHE:
6056 target = expand_builtin___clear_cache (exp);
6057 if (target)
6058 return target;
6059 break;
6061 case BUILT_IN_CLASSIFY_TYPE:
6062 return expand_builtin_classify_type (exp);
6064 case BUILT_IN_CONSTANT_P:
6065 return const0_rtx;
6067 case BUILT_IN_FRAME_ADDRESS:
6068 case BUILT_IN_RETURN_ADDRESS:
6069 return expand_builtin_frame_address (fndecl, exp);
6071 /* Returns the address of the area where the structure is returned.
6072 0 otherwise. */
6073 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6074 if (call_expr_nargs (exp) != 0
6075 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6076 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6077 return const0_rtx;
6078 else
6079 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6081 case BUILT_IN_ALLOCA:
6082 case BUILT_IN_ALLOCA_WITH_ALIGN:
6083 /* If the allocation stems from the declaration of a variable-sized
6084 object, it cannot accumulate. */
6085 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6086 if (target)
6087 return target;
6088 break;
6090 case BUILT_IN_STACK_SAVE:
6091 return expand_stack_save ();
6093 case BUILT_IN_STACK_RESTORE:
6094 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6095 return const0_rtx;
6097 case BUILT_IN_BSWAP16:
6098 case BUILT_IN_BSWAP32:
6099 case BUILT_IN_BSWAP64:
6100 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6101 if (target)
6102 return target;
6103 break;
6105 CASE_INT_FN (BUILT_IN_FFS):
6106 target = expand_builtin_unop (target_mode, exp, target,
6107 subtarget, ffs_optab);
6108 if (target)
6109 return target;
6110 break;
6112 CASE_INT_FN (BUILT_IN_CLZ):
6113 target = expand_builtin_unop (target_mode, exp, target,
6114 subtarget, clz_optab);
6115 if (target)
6116 return target;
6117 break;
6119 CASE_INT_FN (BUILT_IN_CTZ):
6120 target = expand_builtin_unop (target_mode, exp, target,
6121 subtarget, ctz_optab);
6122 if (target)
6123 return target;
6124 break;
6126 CASE_INT_FN (BUILT_IN_CLRSB):
6127 target = expand_builtin_unop (target_mode, exp, target,
6128 subtarget, clrsb_optab);
6129 if (target)
6130 return target;
6131 break;
6133 CASE_INT_FN (BUILT_IN_POPCOUNT):
6134 target = expand_builtin_unop (target_mode, exp, target,
6135 subtarget, popcount_optab);
6136 if (target)
6137 return target;
6138 break;
6140 CASE_INT_FN (BUILT_IN_PARITY):
6141 target = expand_builtin_unop (target_mode, exp, target,
6142 subtarget, parity_optab);
6143 if (target)
6144 return target;
6145 break;
6147 case BUILT_IN_STRLEN:
6148 target = expand_builtin_strlen (exp, target, target_mode);
6149 if (target)
6150 return target;
6151 break;
6153 case BUILT_IN_STRCPY:
6154 target = expand_builtin_strcpy (exp, target);
6155 if (target)
6156 return target;
6157 break;
6159 case BUILT_IN_STRNCPY:
6160 target = expand_builtin_strncpy (exp, target);
6161 if (target)
6162 return target;
6163 break;
6165 case BUILT_IN_STPCPY:
6166 target = expand_builtin_stpcpy (exp, target, mode);
6167 if (target)
6168 return target;
6169 break;
6171 case BUILT_IN_MEMCPY:
6172 target = expand_builtin_memcpy (exp, target);
6173 if (target)
6174 return target;
6175 break;
6177 case BUILT_IN_MEMPCPY:
6178 target = expand_builtin_mempcpy (exp, target, mode);
6179 if (target)
6180 return target;
6181 break;
6183 case BUILT_IN_MEMSET:
6184 target = expand_builtin_memset (exp, target, mode);
6185 if (target)
6186 return target;
6187 break;
6189 case BUILT_IN_BZERO:
6190 target = expand_builtin_bzero (exp);
6191 if (target)
6192 return target;
6193 break;
6195 case BUILT_IN_STRCMP:
6196 target = expand_builtin_strcmp (exp, target);
6197 if (target)
6198 return target;
6199 break;
6201 case BUILT_IN_STRNCMP:
6202 target = expand_builtin_strncmp (exp, target, mode);
6203 if (target)
6204 return target;
6205 break;
6207 case BUILT_IN_BCMP:
6208 case BUILT_IN_MEMCMP:
6209 target = expand_builtin_memcmp (exp, target, mode);
6210 if (target)
6211 return target;
6212 break;
6214 case BUILT_IN_SETJMP:
6215 /* This should have been lowered to the builtins below. */
6216 gcc_unreachable ();
6218 case BUILT_IN_SETJMP_SETUP:
6219 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6220 and the receiver label. */
6221 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6223 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6224 VOIDmode, EXPAND_NORMAL);
6225 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6226 rtx label_r = label_rtx (label);
6228 /* This is copied from the handling of non-local gotos. */
6229 expand_builtin_setjmp_setup (buf_addr, label_r);
6230 nonlocal_goto_handler_labels
6231 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6232 nonlocal_goto_handler_labels);
6233 /* ??? Do not let expand_label treat us as such since we would
6234 not want to be both on the list of non-local labels and on
6235 the list of forced labels. */
6236 FORCED_LABEL (label) = 0;
6237 return const0_rtx;
6239 break;
6241 case BUILT_IN_SETJMP_RECEIVER:
6242 /* __builtin_setjmp_receiver is passed the receiver label. */
6243 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6245 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6246 rtx label_r = label_rtx (label);
6248 expand_builtin_setjmp_receiver (label_r);
6249 return const0_rtx;
6251 break;
6253 /* __builtin_longjmp is passed a pointer to an array of five words.
6254 It's similar to the C library longjmp function but works with
6255 __builtin_setjmp above. */
6256 case BUILT_IN_LONGJMP:
6257 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6259 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6260 VOIDmode, EXPAND_NORMAL);
6261 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6263 if (value != const1_rtx)
6265 error ("%<__builtin_longjmp%> second argument must be 1");
6266 return const0_rtx;
6269 expand_builtin_longjmp (buf_addr, value);
6270 return const0_rtx;
6272 break;
6274 case BUILT_IN_NONLOCAL_GOTO:
6275 target = expand_builtin_nonlocal_goto (exp);
6276 if (target)
6277 return target;
6278 break;
6280 /* This updates the setjmp buffer that is its argument with the value
6281 of the current stack pointer. */
6282 case BUILT_IN_UPDATE_SETJMP_BUF:
6283 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6285 rtx buf_addr
6286 = expand_normal (CALL_EXPR_ARG (exp, 0));
6288 expand_builtin_update_setjmp_buf (buf_addr);
6289 return const0_rtx;
6291 break;
6293 case BUILT_IN_TRAP:
6294 expand_builtin_trap ();
6295 return const0_rtx;
6297 case BUILT_IN_UNREACHABLE:
6298 expand_builtin_unreachable ();
6299 return const0_rtx;
6301 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6302 case BUILT_IN_SIGNBITD32:
6303 case BUILT_IN_SIGNBITD64:
6304 case BUILT_IN_SIGNBITD128:
6305 target = expand_builtin_signbit (exp, target);
6306 if (target)
6307 return target;
6308 break;
6310 /* Various hooks for the DWARF 2 __throw routine. */
6311 case BUILT_IN_UNWIND_INIT:
6312 expand_builtin_unwind_init ();
6313 return const0_rtx;
6314 case BUILT_IN_DWARF_CFA:
6315 return virtual_cfa_rtx;
6316 #ifdef DWARF2_UNWIND_INFO
6317 case BUILT_IN_DWARF_SP_COLUMN:
6318 return expand_builtin_dwarf_sp_column ();
6319 case BUILT_IN_INIT_DWARF_REG_SIZES:
6320 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6321 return const0_rtx;
6322 #endif
6323 case BUILT_IN_FROB_RETURN_ADDR:
6324 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6325 case BUILT_IN_EXTRACT_RETURN_ADDR:
6326 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6327 case BUILT_IN_EH_RETURN:
6328 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6329 CALL_EXPR_ARG (exp, 1));
6330 return const0_rtx;
6331 #ifdef EH_RETURN_DATA_REGNO
6332 case BUILT_IN_EH_RETURN_DATA_REGNO:
6333 return expand_builtin_eh_return_data_regno (exp);
6334 #endif
6335 case BUILT_IN_EXTEND_POINTER:
6336 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6337 case BUILT_IN_EH_POINTER:
6338 return expand_builtin_eh_pointer (exp);
6339 case BUILT_IN_EH_FILTER:
6340 return expand_builtin_eh_filter (exp);
6341 case BUILT_IN_EH_COPY_VALUES:
6342 return expand_builtin_eh_copy_values (exp);
6344 case BUILT_IN_VA_START:
6345 return expand_builtin_va_start (exp);
6346 case BUILT_IN_VA_END:
6347 return expand_builtin_va_end (exp);
6348 case BUILT_IN_VA_COPY:
6349 return expand_builtin_va_copy (exp);
6350 case BUILT_IN_EXPECT:
6351 return expand_builtin_expect (exp, target);
6352 case BUILT_IN_ASSUME_ALIGNED:
6353 return expand_builtin_assume_aligned (exp, target);
6354 case BUILT_IN_PREFETCH:
6355 expand_builtin_prefetch (exp);
6356 return const0_rtx;
6358 case BUILT_IN_INIT_TRAMPOLINE:
6359 return expand_builtin_init_trampoline (exp, true);
6360 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6361 return expand_builtin_init_trampoline (exp, false);
6362 case BUILT_IN_ADJUST_TRAMPOLINE:
6363 return expand_builtin_adjust_trampoline (exp);
6365 case BUILT_IN_FORK:
6366 case BUILT_IN_EXECL:
6367 case BUILT_IN_EXECV:
6368 case BUILT_IN_EXECLP:
6369 case BUILT_IN_EXECLE:
6370 case BUILT_IN_EXECVP:
6371 case BUILT_IN_EXECVE:
6372 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6373 if (target)
6374 return target;
6375 break;
6377 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6378 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6379 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6380 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6381 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6382 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6383 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6384 if (target)
6385 return target;
6386 break;
6388 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6389 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6390 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6391 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6392 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6393 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6394 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6395 if (target)
6396 return target;
6397 break;
6399 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6400 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6401 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6402 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6403 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6404 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6405 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6406 if (target)
6407 return target;
6408 break;
6410 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6411 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6412 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6413 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6414 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6415 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6416 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6417 if (target)
6418 return target;
6419 break;
6421 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6422 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6423 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6424 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6425 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6426 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6427 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6428 if (target)
6429 return target;
6430 break;
6432 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6433 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6434 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6435 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6436 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6437 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6438 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6439 if (target)
6440 return target;
6441 break;
6443 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6444 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6445 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6446 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6447 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6448 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6449 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6450 if (target)
6451 return target;
6452 break;
6454 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6455 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6456 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6457 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6458 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6459 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6460 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6461 if (target)
6462 return target;
6463 break;
6465 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6466 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6467 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6468 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6469 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6470 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6471 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6472 if (target)
6473 return target;
6474 break;
6476 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6477 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6478 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6479 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6480 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6481 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6482 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6483 if (target)
6484 return target;
6485 break;
6487 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6488 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6489 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6490 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6491 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6492 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6493 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6494 if (target)
6495 return target;
6496 break;
6498 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6499 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6500 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6501 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6502 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6503 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6504 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6505 if (target)
6506 return target;
6507 break;
6509 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6510 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6511 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6512 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6513 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6514 if (mode == VOIDmode)
6515 mode = TYPE_MODE (boolean_type_node);
6516 if (!target || !register_operand (target, mode))
6517 target = gen_reg_rtx (mode);
6519 mode = get_builtin_sync_mode
6520 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6521 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6522 if (target)
6523 return target;
6524 break;
6526 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6527 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6528 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6529 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6530 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6531 mode = get_builtin_sync_mode
6532 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6533 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6534 if (target)
6535 return target;
6536 break;
6538 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6539 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6540 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6541 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6542 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6543 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6544 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6545 if (target)
6546 return target;
6547 break;
6549 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6550 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6551 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6552 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6553 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6554 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6555 expand_builtin_sync_lock_release (mode, exp);
6556 return const0_rtx;
6558 case BUILT_IN_SYNC_SYNCHRONIZE:
6559 expand_builtin_sync_synchronize ();
6560 return const0_rtx;
6562 case BUILT_IN_ATOMIC_EXCHANGE_1:
6563 case BUILT_IN_ATOMIC_EXCHANGE_2:
6564 case BUILT_IN_ATOMIC_EXCHANGE_4:
6565 case BUILT_IN_ATOMIC_EXCHANGE_8:
6566 case BUILT_IN_ATOMIC_EXCHANGE_16:
6567 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6568 target = expand_builtin_atomic_exchange (mode, exp, target);
6569 if (target)
6570 return target;
6571 break;
6573 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6574 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6575 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6576 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6577 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6579 unsigned int nargs, z;
6580 vec<tree, va_gc> *vec;
6582 mode =
6583 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6584 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6585 if (target)
6586 return target;
6588 /* If this is turned into an external library call, the weak parameter
6589 must be dropped to match the expected parameter list. */
6590 nargs = call_expr_nargs (exp);
6591 vec_alloc (vec, nargs - 1);
6592 for (z = 0; z < 3; z++)
6593 vec->quick_push (CALL_EXPR_ARG (exp, z));
6594 /* Skip the boolean weak parameter. */
6595 for (z = 4; z < 6; z++)
6596 vec->quick_push (CALL_EXPR_ARG (exp, z));
6597 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6598 break;
6601 case BUILT_IN_ATOMIC_LOAD_1:
6602 case BUILT_IN_ATOMIC_LOAD_2:
6603 case BUILT_IN_ATOMIC_LOAD_4:
6604 case BUILT_IN_ATOMIC_LOAD_8:
6605 case BUILT_IN_ATOMIC_LOAD_16:
6606 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6607 target = expand_builtin_atomic_load (mode, exp, target);
6608 if (target)
6609 return target;
6610 break;
6612 case BUILT_IN_ATOMIC_STORE_1:
6613 case BUILT_IN_ATOMIC_STORE_2:
6614 case BUILT_IN_ATOMIC_STORE_4:
6615 case BUILT_IN_ATOMIC_STORE_8:
6616 case BUILT_IN_ATOMIC_STORE_16:
6617 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6618 target = expand_builtin_atomic_store (mode, exp);
6619 if (target)
6620 return const0_rtx;
6621 break;
6623 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6624 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6625 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6626 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6627 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6629 enum built_in_function lib;
6630 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6631 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6632 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6633 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6634 ignore, lib);
6635 if (target)
6636 return target;
6637 break;
6639 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6640 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6641 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6642 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6643 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6645 enum built_in_function lib;
6646 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6647 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6648 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6649 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6650 ignore, lib);
6651 if (target)
6652 return target;
6653 break;
6655 case BUILT_IN_ATOMIC_AND_FETCH_1:
6656 case BUILT_IN_ATOMIC_AND_FETCH_2:
6657 case BUILT_IN_ATOMIC_AND_FETCH_4:
6658 case BUILT_IN_ATOMIC_AND_FETCH_8:
6659 case BUILT_IN_ATOMIC_AND_FETCH_16:
6661 enum built_in_function lib;
6662 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6663 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6664 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6665 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6666 ignore, lib);
6667 if (target)
6668 return target;
6669 break;
6671 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6672 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6673 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6674 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6675 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6677 enum built_in_function lib;
6678 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6679 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6680 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6681 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6682 ignore, lib);
6683 if (target)
6684 return target;
6685 break;
6687 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6688 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6689 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6690 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6691 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6693 enum built_in_function lib;
6694 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6695 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6696 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6697 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6698 ignore, lib);
6699 if (target)
6700 return target;
6701 break;
6703 case BUILT_IN_ATOMIC_OR_FETCH_1:
6704 case BUILT_IN_ATOMIC_OR_FETCH_2:
6705 case BUILT_IN_ATOMIC_OR_FETCH_4:
6706 case BUILT_IN_ATOMIC_OR_FETCH_8:
6707 case BUILT_IN_ATOMIC_OR_FETCH_16:
6709 enum built_in_function lib;
6710 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6711 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6712 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6713 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6714 ignore, lib);
6715 if (target)
6716 return target;
6717 break;
6719 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6720 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6721 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6722 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6723 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6724 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6725 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6726 ignore, BUILT_IN_NONE);
6727 if (target)
6728 return target;
6729 break;
6731 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6732 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6733 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6734 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6735 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6736 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6737 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6738 ignore, BUILT_IN_NONE);
6739 if (target)
6740 return target;
6741 break;
6743 case BUILT_IN_ATOMIC_FETCH_AND_1:
6744 case BUILT_IN_ATOMIC_FETCH_AND_2:
6745 case BUILT_IN_ATOMIC_FETCH_AND_4:
6746 case BUILT_IN_ATOMIC_FETCH_AND_8:
6747 case BUILT_IN_ATOMIC_FETCH_AND_16:
6748 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6749 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6750 ignore, BUILT_IN_NONE);
6751 if (target)
6752 return target;
6753 break;
6755 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6756 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6757 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6758 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6759 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6760 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6761 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6762 ignore, BUILT_IN_NONE);
6763 if (target)
6764 return target;
6765 break;
6767 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6768 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6769 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6770 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6771 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6772 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6773 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6774 ignore, BUILT_IN_NONE);
6775 if (target)
6776 return target;
6777 break;
6779 case BUILT_IN_ATOMIC_FETCH_OR_1:
6780 case BUILT_IN_ATOMIC_FETCH_OR_2:
6781 case BUILT_IN_ATOMIC_FETCH_OR_4:
6782 case BUILT_IN_ATOMIC_FETCH_OR_8:
6783 case BUILT_IN_ATOMIC_FETCH_OR_16:
6784 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6785 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6786 ignore, BUILT_IN_NONE);
6787 if (target)
6788 return target;
6789 break;
6791 case BUILT_IN_ATOMIC_TEST_AND_SET:
6792 return expand_builtin_atomic_test_and_set (exp, target);
6794 case BUILT_IN_ATOMIC_CLEAR:
6795 return expand_builtin_atomic_clear (exp);
6797 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6798 return expand_builtin_atomic_always_lock_free (exp);
6800 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6801 target = expand_builtin_atomic_is_lock_free (exp);
6802 if (target)
6803 return target;
6804 break;
6806 case BUILT_IN_ATOMIC_THREAD_FENCE:
6807 expand_builtin_atomic_thread_fence (exp);
6808 return const0_rtx;
6810 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6811 expand_builtin_atomic_signal_fence (exp);
6812 return const0_rtx;
6814 case BUILT_IN_OBJECT_SIZE:
6815 return expand_builtin_object_size (exp);
6817 case BUILT_IN_MEMCPY_CHK:
6818 case BUILT_IN_MEMPCPY_CHK:
6819 case BUILT_IN_MEMMOVE_CHK:
6820 case BUILT_IN_MEMSET_CHK:
6821 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6822 if (target)
6823 return target;
6824 break;
6826 case BUILT_IN_STRCPY_CHK:
6827 case BUILT_IN_STPCPY_CHK:
6828 case BUILT_IN_STRNCPY_CHK:
6829 case BUILT_IN_STPNCPY_CHK:
6830 case BUILT_IN_STRCAT_CHK:
6831 case BUILT_IN_STRNCAT_CHK:
6832 case BUILT_IN_SNPRINTF_CHK:
6833 case BUILT_IN_VSNPRINTF_CHK:
6834 maybe_emit_chk_warning (exp, fcode);
6835 break;
6837 case BUILT_IN_SPRINTF_CHK:
6838 case BUILT_IN_VSPRINTF_CHK:
6839 maybe_emit_sprintf_chk_warning (exp, fcode);
6840 break;
6842 case BUILT_IN_FREE:
6843 if (warn_free_nonheap_object)
6844 maybe_emit_free_warning (exp);
6845 break;
6847 case BUILT_IN_THREAD_POINTER:
6848 return expand_builtin_thread_pointer (exp, target);
6850 case BUILT_IN_SET_THREAD_POINTER:
6851 expand_builtin_set_thread_pointer (exp);
6852 return const0_rtx;
6854 case BUILT_IN_CILK_DETACH:
6855 expand_builtin_cilk_detach (exp);
6856 return const0_rtx;
6858 case BUILT_IN_CILK_POP_FRAME:
6859 expand_builtin_cilk_pop_frame (exp);
6860 return const0_rtx;
6862 case BUILT_IN_ACC_ON_DEVICE:
6863 target = expand_builtin_acc_on_device (exp, target);
6864 if (target)
6865 return target;
6866 break;
6868 default: /* just do library call, if unknown builtin */
6869 break;
6872 /* The switch statement above can drop through to cause the function
6873 to be called normally. */
6874 return expand_call (exp, target, ignore);
6877 /* Determine whether a tree node represents a call to a built-in
6878 function. If the tree T is a call to a built-in function with
6879 the right number of arguments of the appropriate types, return
6880 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6881 Otherwise the return value is END_BUILTINS. */
6883 enum built_in_function
6884 builtin_mathfn_code (const_tree t)
6886 const_tree fndecl, arg, parmlist;
6887 const_tree argtype, parmtype;
6888 const_call_expr_arg_iterator iter;
/* Only a direct call -- a CALL_EXPR whose callee is an ADDR_EXPR --
   can name a built-in function.  */
6890 if (TREE_CODE (t) != CALL_EXPR
6891 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6892 return END_BUILTINS;
/* The callee must be a declared built-in FUNCTION_DECL;
   machine-specific (BUILT_IN_MD) built-ins are excluded.  */
6894 fndecl = get_callee_fndecl (t);
6895 if (fndecl == NULL_TREE
6896 || TREE_CODE (fndecl) != FUNCTION_DECL
6897 || ! DECL_BUILT_IN (fndecl)
6898 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6899 return END_BUILTINS;
/* Walk the declared parameter types in parallel with the actual call
   arguments, verifying that each pair agrees on its broad type class.  */
6901 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6902 init_const_call_expr_arg_iterator (t, &iter);
6903 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6905 /* If a function doesn't take a variable number of arguments,
6906 the last element in the list will have type `void'. */
6907 parmtype = TREE_VALUE (parmlist);
6908 if (VOID_TYPE_P (parmtype))
/* End of the fixed parameter list: any leftover actual arguments
   mean the call does not match the built-in's prototype.  */
6910 if (more_const_call_expr_args_p (&iter))
6911 return END_BUILTINS;
6912 return DECL_FUNCTION_CODE (fndecl);
/* Fewer actual arguments than declared parameters: no match.  */
6915 if (! more_const_call_expr_args_p (&iter))
6916 return END_BUILTINS;
6918 arg = next_const_call_expr_arg (&iter);
6919 argtype = TREE_TYPE (arg);
/* Parameter and argument must belong to the same type class:
   scalar float, complex float, pointer, or integral.  Any other
   parameter type class is not recognized here.  */
6921 if (SCALAR_FLOAT_TYPE_P (parmtype))
6923 if (! SCALAR_FLOAT_TYPE_P (argtype))
6924 return END_BUILTINS;
6926 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6928 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6929 return END_BUILTINS;
6931 else if (POINTER_TYPE_P (parmtype))
6933 if (! POINTER_TYPE_P (argtype))
6934 return END_BUILTINS;
6936 else if (INTEGRAL_TYPE_P (parmtype))
6938 if (! INTEGRAL_TYPE_P (argtype))
6939 return END_BUILTINS;
6941 else
6942 return END_BUILTINS;
/* The loop ran off the end of PARMLIST without hitting a `void'
   terminator, so the built-in takes a variable-length argument list;
   the fixed part matched, which is all we can check.  */
6945 /* Variable-length argument list. */
6946 return DECL_FUNCTION_CODE (fndecl);
6949 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6950 evaluate to a constant. */
6952 static tree
6953 fold_builtin_constant_p (tree arg)
6955 /* We return 1 for a numeric type that's known to be a constant
6956 value at compile-time or for an aggregate type that's a
6957 literal constant. */
6958 STRIP_NOPS (arg);
6960 /* If we know this is a constant, emit the constant of one. */
6961 if (CONSTANT_CLASS_P (arg)
6962 || (TREE_CODE (arg) == CONSTRUCTOR
6963 && TREE_CONSTANT (arg)))
6964 return integer_one_node;
/* The address of a string literal, or of element zero of one, is
   likewise a compile-time constant.  */
6965 if (TREE_CODE (arg) == ADDR_EXPR)
6967 tree op = TREE_OPERAND (arg, 0);
6968 if (TREE_CODE (op) == STRING_CST
6969 || (TREE_CODE (op) == ARRAY_REF
6970 && integer_zerop (TREE_OPERAND (op, 1))
6971 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6972 return integer_one_node;
6975 /* If this expression has side effects, show we don't know it to be a
6976 constant. Likewise if it's a pointer or aggregate type since in
6977 those case we only want literals, since those are only optimized
6978 when generating RTL, not later.
6979 And finally, if we are compiling an initializer, not code, we
6980 need to return a definite result now; there's not going to be any
6981 more optimization done. */
6982 if (TREE_SIDE_EFFECTS (arg)
6983 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6984 || POINTER_TYPE_P (TREE_TYPE (arg))
6985 || cfun == 0
6986 || folding_initializer
6987 || force_folding_builtin_constant_p)
6988 return integer_zero_node;
/* Undecided: leave the __builtin_constant_p call in place so a later
   pass may still resolve it after more optimization.  */
6990 return NULL_TREE;
6993 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6994 return it as a truthvalue. */
6996 static tree
6997 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
6998 tree predictor)
7000 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
/* Pull the declared parameter and return types from the built-in's
   own prototype so PRED and EXPECTED can be converted to match it.  */
7002 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7003 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7004 ret_type = TREE_TYPE (TREE_TYPE (fn));
7005 pred_type = TREE_VALUE (arg_types);
7006 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7008 pred = fold_convert_loc (loc, pred_type, pred);
7009 expected = fold_convert_loc (loc, expected_type, expected);
/* The optional PREDICTOR is only passed when non-NULL, giving a
   two- or three-argument call accordingly.  */
7010 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7011 predictor);
/* Turn the call's result back into a truthvalue by testing it
   against zero.  */
7013 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7014 build_int_cst (ret_type, 0));
7017 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7018 NULL_TREE if no simplification is possible. */
7020 tree
7021 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7023 tree inner, fndecl, inner_arg0;
7024 enum tree_code code;
7026 /* Distribute the expected value over short-circuiting operators.
7027 See through the cast from truthvalue_type_node to long. */
7028 inner_arg0 = arg0;
7029 while (TREE_CODE (inner_arg0) == NOP_EXPR
7030 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7031 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7032 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7034 /* If this is a builtin_expect within a builtin_expect keep the
7035 inner one. See through a comparison against a constant. It
7036 might have been added to create a thruthvalue. */
7037 inner = inner_arg0;
7039 if (COMPARISON_CLASS_P (inner)
7040 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7041 inner = TREE_OPERAND (inner, 0);
7043 if (TREE_CODE (inner) == CALL_EXPR
7044 && (fndecl = get_callee_fndecl (inner))
7045 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7046 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7047 return arg0;
/* __builtin_expect (a && b, v) / (a || b, v): push the prediction down
   onto each operand so both conditional jumps get hinted.  */
7049 inner = inner_arg0;
7050 code = TREE_CODE (inner);
7051 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7053 tree op0 = TREE_OPERAND (inner, 0);
7054 tree op1 = TREE_OPERAND (inner, 1);
7056 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7057 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7058 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7060 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7063 /* If the argument isn't invariant then there's nothing else we can do. */
7064 if (!TREE_CONSTANT (inner_arg0))
7065 return NULL_TREE;
7067 /* If we expect that a comparison against the argument will fold to
7068 a constant return the constant. In practice, this means a true
7069 constant or the address of a non-weak symbol. */
7070 inner = inner_arg0;
7071 STRIP_NOPS (inner);
7072 if (TREE_CODE (inner) == ADDR_EXPR)
/* Peel COMPONENT_REFs/ARRAY_REFs to reach the underlying decl; the
   address of a weak symbol is not a link-time constant.  */
7076 inner = TREE_OPERAND (inner, 0);
7078 while (TREE_CODE (inner) == COMPONENT_REF
7079 || TREE_CODE (inner) == ARRAY_REF);
7080 if ((TREE_CODE (inner) == VAR_DECL
7081 || TREE_CODE (inner) == FUNCTION_DECL)
7082 && DECL_WEAK (inner))
7083 return NULL_TREE;
7086 /* Otherwise, ARG0 already has the proper type for the return value. */
7087 return arg0;
7090 /* Fold a call to __builtin_classify_type with argument ARG. */
7092 static tree
7093 fold_builtin_classify_type (tree arg)
7095 if (arg == 0)
7096 return build_int_cst (integer_type_node, no_type_class);
7098 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7101 /* Fold a call to __builtin_strlen with argument ARG. */
7103 static tree
7104 fold_builtin_strlen (location_t loc, tree type, tree arg)
7106 if (!validate_arg (arg, POINTER_TYPE))
7107 return NULL_TREE;
7108 else
7110 tree len = c_strlen (arg, 0);
7112 if (len)
7113 return fold_convert_loc (loc, type, len);
7115 return NULL_TREE;
7119 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7121 static tree
7122 fold_builtin_inf (location_t loc, tree type, int warn)
7124 REAL_VALUE_TYPE real;
7126 /* __builtin_inff is intended to be usable to define INFINITY on all
7127 targets. If an infinity is not available, INFINITY expands "to a
7128 positive constant of type float that overflows at translation
7129 time", footnote "In this case, using INFINITY will violate the
7130 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7131 Thus we pedwarn to ensure this constraint violation is
7132 diagnosed. */
7133 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7134 pedwarn (loc, 0, "target format does not support infinity");
7136 real_inf (&real);
7137 return build_real (type, real);
7140 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7142 static tree
7143 fold_builtin_nan (tree arg, tree type, int quiet)
7145 REAL_VALUE_TYPE real;
7146 const char *str;
7148 if (!validate_arg (arg, POINTER_TYPE))
7149 return NULL_TREE;
7150 str = c_getstr (arg);
7151 if (!str)
7152 return NULL_TREE;
7154 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7155 return NULL_TREE;
7157 return build_real (type, real);
7160 /* Return true if the floating point expression T has an integer value.
7161 We also allow +Inf, -Inf and NaN to be considered integer values. */
7163 static bool
7164 integer_valued_real_p (tree t)
7166 switch (TREE_CODE (t))
7168 case FLOAT_EXPR:
7169 return true;
7171 case ABS_EXPR:
7172 case SAVE_EXPR:
7173 return integer_valued_real_p (TREE_OPERAND (t, 0));
7175 case COMPOUND_EXPR:
7176 case MODIFY_EXPR:
7177 case BIND_EXPR:
7178 return integer_valued_real_p (TREE_OPERAND (t, 1));
/* These arithmetic operations yield an integer value whenever both
   operands are integer valued.  */
7180 case PLUS_EXPR:
7181 case MINUS_EXPR:
7182 case MULT_EXPR:
7183 case MIN_EXPR:
7184 case MAX_EXPR:
7185 return integer_valued_real_p (TREE_OPERAND (t, 0))
7186 && integer_valued_real_p (TREE_OPERAND (t, 1));
/* Only the two value operands matter; the condition's value does not
   affect integer-valuedness.  */
7188 case COND_EXPR:
7189 return integer_valued_real_p (TREE_OPERAND (t, 1))
7190 && integer_valued_real_p (TREE_OPERAND (t, 2));
7192 case REAL_CST:
7193 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
/* A conversion from an integer type is integer valued by construction;
   a float-to-float conversion is integer valued iff its operand is.  */
7195 case NOP_EXPR:
7197 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7198 if (TREE_CODE (type) == INTEGER_TYPE)
7199 return true;
7200 if (TREE_CODE (type) == REAL_TYPE)
7201 return integer_valued_real_p (TREE_OPERAND (t, 0));
7202 break;
/* Rounding builtins always produce integer values; fmin/fmax of
   integer-valued operands stays integer valued.  */
7205 case CALL_EXPR:
7206 switch (builtin_mathfn_code (t))
7208 CASE_FLT_FN (BUILT_IN_CEIL):
7209 CASE_FLT_FN (BUILT_IN_FLOOR):
7210 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7211 CASE_FLT_FN (BUILT_IN_RINT):
7212 CASE_FLT_FN (BUILT_IN_ROUND):
7213 CASE_FLT_FN (BUILT_IN_TRUNC):
7214 return true;
7216 CASE_FLT_FN (BUILT_IN_FMIN):
7217 CASE_FLT_FN (BUILT_IN_FMAX):
7218 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7219 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7221 default:
7222 break;
7224 break;
7226 default:
7227 break;
/* Conservative default: we could not prove T integer valued.  */
7229 return false;
7232 /* FNDECL is assumed to be a builtin where truncation can be propagated
7233 across (for instance floor((double)f) == (double)floorf (f).
7234 Do the transformation for a call with argument ARG. */
7236 static tree
7237 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7239 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7241 if (!validate_arg (arg, REAL_TYPE))
7242 return NULL_TREE;
7244 /* Integer rounding functions are idempotent. */
7245 if (fcode == builtin_mathfn_code (arg))
7246 return arg;
7248 /* If argument is already integer valued, and we don't need to worry
7249 about setting errno, there's no need to perform rounding. */
7250 if (! flag_errno_math && integer_valued_real_p (arg))
7251 return arg;
7253 if (optimize)
/* Narrow e.g. floor ((double) f) to (double) floorf (f) when the
   argument was widened from a narrower float type and a builtin for
   that narrower type exists.  */
7255 tree arg0 = strip_float_extensions (arg);
7256 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7257 tree newtype = TREE_TYPE (arg0);
7258 tree decl;
7260 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7261 && (decl = mathfn_built_in (newtype, fcode)))
7262 return fold_convert_loc (loc, ftype,
7263 build_call_expr_loc (loc, decl, 1,
7264 fold_convert_loc (loc,
7265 newtype,
7266 arg0)));
7268 return NULL_TREE;
7271 /* FNDECL is assumed to be builtin which can narrow the FP type of
7272 the argument, for instance lround((double)f) -> lroundf (f).
7273 Do the transformation for a call with argument ARG. */
7275 static tree
7276 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7278 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7280 if (!validate_arg (arg, REAL_TYPE))
7281 return NULL_TREE;
7283 /* If argument is already integer valued, and we don't need to worry
7284 about setting errno, there's no need to perform rounding. */
7285 if (! flag_errno_math && integer_valued_real_p (arg))
7286 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7287 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7289 if (optimize)
/* Narrow the float argument: lround ((double) f) -> lroundf (f).  */
7291 tree ftype = TREE_TYPE (arg);
7292 tree arg0 = strip_float_extensions (arg);
7293 tree newtype = TREE_TYPE (arg0);
7294 tree decl;
7296 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7297 && (decl = mathfn_built_in (newtype, fcode)))
7298 return build_call_expr_loc (loc, decl, 1,
7299 fold_convert_loc (loc, newtype, arg0));
7302 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7303 sizeof (int) == sizeof (long). */
7304 if (TYPE_PRECISION (integer_type_node)
7305 == TYPE_PRECISION (long_integer_type_node))
7307 tree newfn = NULL_TREE;
7308 switch (fcode)
7310 CASE_FLT_FN (BUILT_IN_ICEIL):
7311 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7312 break;
7314 CASE_FLT_FN (BUILT_IN_IFLOOR):
7315 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7316 break;
7318 CASE_FLT_FN (BUILT_IN_IROUND):
7319 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7320 break;
7322 CASE_FLT_FN (BUILT_IN_IRINT):
7323 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7324 break;
7326 default:
7327 break;
/* The long variant returns the same value, so only a conversion of the
   result back to the int return type is needed.  */
7330 if (newfn)
7332 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7333 return fold_convert_loc (loc,
7334 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7338 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7339 sizeof (long long) == sizeof (long). */
7340 if (TYPE_PRECISION (long_long_integer_type_node)
7341 == TYPE_PRECISION (long_integer_type_node))
7343 tree newfn = NULL_TREE;
7344 switch (fcode)
7346 CASE_FLT_FN (BUILT_IN_LLCEIL):
7347 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7348 break;
7350 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7351 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7352 break;
7354 CASE_FLT_FN (BUILT_IN_LLROUND):
7355 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7356 break;
7358 CASE_FLT_FN (BUILT_IN_LLRINT):
7359 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7360 break;
7362 default:
7363 break;
7366 if (newfn)
7368 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7369 return fold_convert_loc (loc,
7370 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7374 return NULL_TREE;
7377 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7378 return type. Return NULL_TREE if no simplification can be made. */
7380 static tree
7381 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7383 tree res;
7385 if (!validate_arg (arg, COMPLEX_TYPE)
7386 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7387 return NULL_TREE;
7389 /* Calculate the result when the argument is a constant. */
7390 if (TREE_CODE (arg) == COMPLEX_CST
7391 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7392 type, mpfr_hypot)))
7393 return res;
7395 if (TREE_CODE (arg) == COMPLEX_EXPR)
7397 tree real = TREE_OPERAND (arg, 0);
7398 tree imag = TREE_OPERAND (arg, 1);
7400 /* If either part is zero, cabs is fabs of the other. */
7401 if (real_zerop (real))
7402 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7403 if (real_zerop (imag))
7404 return fold_build1_loc (loc, ABS_EXPR, type, real);
7406 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7407 if (flag_unsafe_math_optimizations
7408 && operand_equal_p (real, imag, OEP_PURE_SAME))
7410 const REAL_VALUE_TYPE sqrt2_trunc
7411 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ())
7412 STRIP_NOPS (real);
7413 return fold_build2_loc (loc, MULT_EXPR, type,
7414 fold_build1_loc (loc, ABS_EXPR, type, real),
7415 build_real (type, sqrt2_trunc));
7419 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7420 if (TREE_CODE (arg) == NEGATE_EXPR
7421 || TREE_CODE (arg) == CONJ_EXPR)
7422 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7424 /* Don't do this when optimizing for size. */
7425 if (flag_unsafe_math_optimizations
7426 && optimize && optimize_function_for_speed_p (cfun))
/* Expand cabs (z) inline as sqrt (re*re + im*im).  */
7428 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7430 if (sqrtfn != NULL_TREE)
7432 tree rpart, ipart, result;
/* Wrap ARG and both parts in save_exprs so any side effects are
   evaluated only once.  */
7434 arg = builtin_save_expr (arg);
7436 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7437 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7439 rpart = builtin_save_expr (rpart);
7440 ipart = builtin_save_expr (ipart);
7442 result = fold_build2_loc (loc, PLUS_EXPR, type,
7443 fold_build2_loc (loc, MULT_EXPR, type,
7444 rpart, rpart),
7445 fold_build2_loc (loc, MULT_EXPR, type,
7446 ipart, ipart));
7448 return build_call_expr_loc (loc, sqrtfn, 1, result);
7452 return NULL_TREE;
7455 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7456 complex tree type of the result. If NEG is true, the imaginary
7457 zero is negative. */
7459 static tree
7460 build_complex_cproj (tree type, bool neg)
7462 REAL_VALUE_TYPE rinf, rzero = dconst0;
7464 real_inf (&rinf);
7465 rzero.sign = neg;
7466 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7467 build_real (TREE_TYPE (type), rzero));
7470 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7471 return type. Return NULL_TREE if no simplification can be made. */
7473 static tree
7474 fold_builtin_cproj (location_t loc, tree arg, tree type)
7476 if (!validate_arg (arg, COMPLEX_TYPE)
7477 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7478 return NULL_TREE;
7480 /* If there are no infinities, return arg. */
7481 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7482 return non_lvalue_loc (loc, arg);
7484 /* Calculate the result when the argument is a constant. */
7485 if (TREE_CODE (arg) == COMPLEX_CST)
7487 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7488 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
/* Any infinite input projects to (inf + 0i), with the imaginary zero
   carrying the sign of the input's imaginary part.  */
7490 if (real_isinf (real) || real_isinf (imag))
7491 return build_complex_cproj (type, imag->sign);
7492 else
7493 return arg;
7495 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7497 tree real = TREE_OPERAND (arg, 0);
7498 tree imag = TREE_OPERAND (arg, 1);
7500 STRIP_NOPS (real);
7501 STRIP_NOPS (imag);
7503 /* If the real part is inf and the imag part is known to be
7504 nonnegative, return (inf + 0i). Remember side-effects are
7505 possible in the imag part. */
7506 if (TREE_CODE (real) == REAL_CST
7507 && real_isinf (TREE_REAL_CST_PTR (real))
7508 && tree_expr_nonnegative_p (imag))
7509 return omit_one_operand_loc (loc, type,
7510 build_complex_cproj (type, false),
7511 arg);
7513 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7514 Remember side-effects are possible in the real part. */
7515 if (TREE_CODE (imag) == REAL_CST
7516 && real_isinf (TREE_REAL_CST_PTR (imag)))
7517 return
7518 omit_one_operand_loc (loc, type,
7519 build_complex_cproj (type, TREE_REAL_CST_PTR
7520 (imag)->sign), arg);
7523 return NULL_TREE;
7526 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7527 Return NULL_TREE if no simplification can be made. */
7529 static tree
7530 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7533 enum built_in_function fcode;
7534 tree res;
7536 if (!validate_arg (arg, REAL_TYPE))
7537 return NULL_TREE;
7539 /* Calculate the result when the argument is a constant. */
7540 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7541 return res;
7543 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7544 fcode = builtin_mathfn_code (arg);
7545 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7547 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7548 arg = fold_build2_loc (loc, MULT_EXPR, type,
7549 CALL_EXPR_ARG (arg, 0),
7550 build_real (type, dconsthalf));
7551 return build_call_expr_loc (loc, expfn, 1, arg);
7554 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7555 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7557 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7559 if (powfn)
7561 tree arg0 = CALL_EXPR_ARG (arg, 0);
7562 tree tree_root;
7563 /* The inner root was either sqrt or cbrt. */
7564 /* This was a conditional expression but it triggered a bug
7565 in Sun C 5.5. */
7566 REAL_VALUE_TYPE dconstroot;
7567 if (BUILTIN_SQRT_P (fcode))
7568 dconstroot = dconsthalf;
7569 else
7570 dconstroot = dconst_third ();
/* Halving the exponent (1/2 -> 1/4, 1/3 -> 1/6) accounts for the
   outer sqrt.  */
7572 /* Adjust for the outer root. */
7573 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7574 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7575 tree_root = build_real (type, dconstroot);
7576 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7580 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7581 if (flag_unsafe_math_optimizations
7582 && (fcode == BUILT_IN_POW
7583 || fcode == BUILT_IN_POWF
7584 || fcode == BUILT_IN_POWL))
7586 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7587 tree arg0 = CALL_EXPR_ARG (arg, 0);
7588 tree arg1 = CALL_EXPR_ARG (arg, 1);
7589 tree narg1;
/* The identity requires a nonnegative base, so use |x| unless x is
   provably nonnegative.  */
7590 if (!tree_expr_nonnegative_p (arg0))
7591 arg0 = build1 (ABS_EXPR, type, arg0);
7592 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7593 build_real (type, dconsthalf));
7594 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7597 return NULL_TREE;
7600 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7601 Return NULL_TREE if no simplification can be made. */
7603 static tree
7604 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7606 const enum built_in_function fcode = builtin_mathfn_code (arg);
7607 tree res;
7609 if (!validate_arg (arg, REAL_TYPE))
7610 return NULL_TREE;
7612 /* Calculate the result when the argument is a constant. */
7613 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7614 return res;
7616 if (flag_unsafe_math_optimizations)
7618 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7619 if (BUILTIN_EXPONENT_P (fcode))
7621 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7622 const REAL_VALUE_TYPE third_trunc =
7623 real_value_truncate (TYPE_MODE (type), dconst_third ());
7624 arg = fold_build2_loc (loc, MULT_EXPR, type,
7625 CALL_EXPR_ARG (arg, 0),
7626 build_real (type, third_trunc));
7627 return build_call_expr_loc (loc, expfn, 1, arg);
7630 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7631 if (BUILTIN_SQRT_P (fcode))
7633 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7635 if (powfn)
7637 tree arg0 = CALL_EXPR_ARG (arg, 0);
7638 tree tree_root;
7639 REAL_VALUE_TYPE dconstroot = dconst_third ();
/* Halve 1/3 (decrement the binary exponent) to get 1/6.  */
7641 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7642 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7643 tree_root = build_real (type, dconstroot);
7644 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7648 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7649 if (BUILTIN_CBRT_P (fcode))
7651 tree arg0 = CALL_EXPR_ARG (arg, 0);
7652 if (tree_expr_nonnegative_p (arg0))
7654 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7656 if (powfn)
7658 tree tree_root;
7659 REAL_VALUE_TYPE dconstroot;
/* 1/9 = (1/3) * (1/3).  */
7661 real_arithmetic (&dconstroot, MULT_EXPR,
7662 dconst_third_ptr (), dconst_third_ptr ());
7663 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7664 tree_root = build_real (type, dconstroot);
7665 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7670 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7671 if (fcode == BUILT_IN_POW
7672 || fcode == BUILT_IN_POWF
7673 || fcode == BUILT_IN_POWL)
7675 tree arg00 = CALL_EXPR_ARG (arg, 0);
7676 tree arg01 = CALL_EXPR_ARG (arg, 1);
7677 if (tree_expr_nonnegative_p (arg00))
7679 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7680 const REAL_VALUE_TYPE dconstroot
7681 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7682 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7683 build_real (type, dconstroot));
7684 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7688 return NULL_TREE;
7691 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7692 TYPE is the type of the return value. Return NULL_TREE if no
7693 simplification can be made. */
7695 static tree
7696 fold_builtin_cos (location_t loc,
7697 tree arg, tree type, tree fndecl)
7699 tree res, narg;
7701 if (!validate_arg (arg, REAL_TYPE))
7702 return NULL_TREE;
7704 /* Calculate the result when the argument is a constant. */
7705 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7706 return res;
7708 /* Optimize cos(-x) into cos (x). */
7709 if ((narg = fold_strip_sign_ops (arg)))
7710 return build_call_expr_loc (loc, fndecl, 1, narg);
7712 return NULL_TREE;
7715 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7716 Return NULL_TREE if no simplification can be made. */
7718 static tree
7719 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7721 if (validate_arg (arg, REAL_TYPE))
7723 tree res, narg;
7725 /* Calculate the result when the argument is a constant. */
7726 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7727 return res;
7729 /* Optimize cosh(-x) into cosh (x). */
7730 if ((narg = fold_strip_sign_ops (arg)))
7731 return build_call_expr_loc (loc, fndecl, 1, narg);
7734 return NULL_TREE;
7737 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7738 argument ARG. TYPE is the type of the return value. Return
7739 NULL_TREE if no simplification can be made. */
7741 static tree
7742 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7743 bool hyper)
7745 if (validate_arg (arg, COMPLEX_TYPE)
7746 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7748 tree tmp;
7750 /* Calculate the result when the argument is a constant. */
7751 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7752 return tmp;
7754 /* Optimize fn(-x) into fn(x). */
7755 if ((tmp = fold_strip_sign_ops (arg)))
7756 return build_call_expr_loc (loc, fndecl, 1, tmp);
7759 return NULL_TREE;
7762 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7763 Return NULL_TREE if no simplification can be made. */
7765 static tree
7766 fold_builtin_tan (tree arg, tree type)
7768 enum built_in_function fcode;
7769 tree res;
7771 if (!validate_arg (arg, REAL_TYPE))
7772 return NULL_TREE;
7774 /* Calculate the result when the argument is a constant. */
7775 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7776 return res;
7778 /* Optimize tan(atan(x)) = x. */
7779 fcode = builtin_mathfn_code (arg);
7780 if (flag_unsafe_math_optimizations
7781 && (fcode == BUILT_IN_ATAN
7782 || fcode == BUILT_IN_ATANF
7783 || fcode == BUILT_IN_ATANL))
7784 return CALL_EXPR_ARG (arg, 0);
7786 return NULL_TREE;
7789 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7790 NULL_TREE if no simplification can be made. */
7792 static tree
7793 fold_builtin_sincos (location_t loc,
7794 tree arg0, tree arg1, tree arg2)
7796 tree type;
7797 tree res, fn, call;
7799 if (!validate_arg (arg0, REAL_TYPE)
7800 || !validate_arg (arg1, POINTER_TYPE)
7801 || !validate_arg (arg2, POINTER_TYPE))
7802 return NULL_TREE;
7804 type = TREE_TYPE (arg0);
7806 /* Calculate the result when the argument is a constant. */
7807 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7808 return res;
7810 /* Canonicalize sincos to cexpi. */
7811 if (!targetm.libc_has_function (function_c99_math_complex))
7812 return NULL_TREE;
7813 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7814 if (!fn)
7815 return NULL_TREE;
/* Save the cexpi call so it is evaluated only once even though its
   result is used twice below.  */
7817 call = build_call_expr_loc (loc, fn, 1, arg0);
7818 call = builtin_save_expr (call);
/* Emit: *arg1 = imagpart (cexpi (arg0));  (the sine)
	 *arg2 = realpart (cexpi (arg0));  (the cosine)  */
7820 return build2 (COMPOUND_EXPR, void_type_node,
7821 build2 (MODIFY_EXPR, void_type_node,
7822 build_fold_indirect_ref_loc (loc, arg1),
7823 build1 (IMAGPART_EXPR, type, call)),
7824 build2 (MODIFY_EXPR, void_type_node,
7825 build_fold_indirect_ref_loc (loc, arg2),
7826 build1 (REALPART_EXPR, type, call)));
7829 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7830 NULL_TREE if no simplification can be made. */
7832 static tree
7833 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7835 tree rtype;
7836 tree realp, imagp, ifn;
7837 tree res;
7839 if (!validate_arg (arg0, COMPLEX_TYPE)
7840 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7841 return NULL_TREE;
7843 /* Calculate the result when the argument is a constant. */
7844 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7845 return res;
7847 rtype = TREE_TYPE (TREE_TYPE (arg0));
7849 /* In case we can figure out the real part of arg0 and it is constant zero
7850 fold to cexpi. */
7851 if (!targetm.libc_has_function (function_c99_math_complex))
7852 return NULL_TREE;
7853 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7854 if (!ifn)
7855 return NULL_TREE;
/* cexp (0 + yi) == cexpi (y).  */
7857 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7858 && real_zerop (realp))
7860 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7861 return build_call_expr_loc (loc, ifn, 1, narg);
7864 /* In case we can easily decompose real and imaginary parts split cexp
7865 to exp (r) * cexpi (i). */
7866 if (flag_unsafe_math_optimizations
7867 && realp)
7869 tree rfn, rcall, icall;
7871 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7872 if (!rfn)
7873 return NULL_TREE;
7875 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7876 if (!imagp)
7877 return NULL_TREE;
/* Save both calls: each result is used in two operands of the final
   COMPLEX_EXPR below.  */
7879 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7880 icall = builtin_save_expr (icall);
7881 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7882 rcall = builtin_save_expr (rcall);
7883 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7884 fold_build2_loc (loc, MULT_EXPR, rtype,
7885 rcall,
7886 fold_build1_loc (loc, REALPART_EXPR,
7887 rtype, icall)),
7888 fold_build2_loc (loc, MULT_EXPR, rtype,
7889 rcall,
7890 fold_build1_loc (loc, IMAGPART_EXPR,
7891 rtype, icall)));
7894 return NULL_TREE;
7897 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7898 Return NULL_TREE if no simplification can be made. */
7900 static tree
7901 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7903 if (!validate_arg (arg, REAL_TYPE))
7904 return NULL_TREE;
7906 /* Optimize trunc of constant value. */
7907 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7909 REAL_VALUE_TYPE r, x;
7910 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7912 x = TREE_REAL_CST (arg);
7913 real_trunc (&r, TYPE_MODE (type), &x);
7914 return build_real (type, r);
7917 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7920 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7921 Return NULL_TREE if no simplification can be made. */
7923 static tree
7924 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7926 if (!validate_arg (arg, REAL_TYPE))
7927 return NULL_TREE;
7929 /* Optimize floor of constant value. */
7930 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7932 REAL_VALUE_TYPE x;
7934 x = TREE_REAL_CST (arg);
/* Constant folding of a NaN operand is skipped when -fmath-errno is
   in effect.  */
7935 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7937 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7938 REAL_VALUE_TYPE r;
7940 real_floor (&r, TYPE_MODE (type), &x);
7941 return build_real (type, r);
7945 /* Fold floor (x) where x is nonnegative to trunc (x). */
7946 if (tree_expr_nonnegative_p (arg))
7948 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7949 if (truncfn)
7950 return build_call_expr_loc (loc, truncfn, 1, arg);
7953 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7956 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7957 Return NULL_TREE if no simplification can be made. */
7959 static tree
7960 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7962 if (!validate_arg (arg, REAL_TYPE))
7963 return NULL_TREE;
7965 /* Optimize ceil of constant value. */
7966 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7968 REAL_VALUE_TYPE x;
7970 x = TREE_REAL_CST (arg);
7971 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7973 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7974 REAL_VALUE_TYPE r;
7976 real_ceil (&r, TYPE_MODE (type), &x);
7977 return build_real (type, r);
7981 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7984 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7985 Return NULL_TREE if no simplification can be made. */
7987 static tree
7988 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7990 if (!validate_arg (arg, REAL_TYPE))
7991 return NULL_TREE;
7993 /* Optimize round of constant value. */
7994 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7996 REAL_VALUE_TYPE x;
7998 x = TREE_REAL_CST (arg);
7999 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8001 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8002 REAL_VALUE_TYPE r;
8004 real_round (&r, TYPE_MODE (type), &x);
8005 return build_real (type, r);
8009 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8012 /* Fold function call to builtin lround, lroundf or lroundl (or the
8013 corresponding long long versions) and other rounding functions. ARG
8014 is the argument to the call. Return NULL_TREE if no simplification
8015 can be made. */
8017 static tree
8018 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8020 if (!validate_arg (arg, REAL_TYPE))
8021 return NULL_TREE;
8023 /* Optimize lround of constant value. */
8024 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8026 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8028 if (real_isfinite (&x))
8030 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8031 tree ftype = TREE_TYPE (arg);
8032 REAL_VALUE_TYPE r;
8033 bool fail = false;
8035 switch (DECL_FUNCTION_CODE (fndecl))
8037 CASE_FLT_FN (BUILT_IN_IFLOOR):
8038 CASE_FLT_FN (BUILT_IN_LFLOOR):
8039 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8040 real_floor (&r, TYPE_MODE (ftype), &x);
8041 break;
8043 CASE_FLT_FN (BUILT_IN_ICEIL):
8044 CASE_FLT_FN (BUILT_IN_LCEIL):
8045 CASE_FLT_FN (BUILT_IN_LLCEIL):
8046 real_ceil (&r, TYPE_MODE (ftype), &x);
8047 break;
8049 CASE_FLT_FN (BUILT_IN_IROUND):
8050 CASE_FLT_FN (BUILT_IN_LROUND):
8051 CASE_FLT_FN (BUILT_IN_LLROUND):
8052 real_round (&r, TYPE_MODE (ftype), &x);
8053 break;
8055 default:
8056 gcc_unreachable ();
/* FAIL is set when the rounded value does not fit in ITYPE; keep the
   call in that case so runtime behavior is preserved.  */
8059 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8060 if (!fail)
8061 return wide_int_to_tree (itype, val);
8065 switch (DECL_FUNCTION_CODE (fndecl))
8067 CASE_FLT_FN (BUILT_IN_LFLOOR):
8068 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8069 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8070 if (tree_expr_nonnegative_p (arg))
8071 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8072 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8073 break;
8074 default:;
8077 return fold_fixed_mathfn (loc, fndecl, arg);
8080 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8081 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8082 the argument to the call. Return NULL_TREE if no simplification can
8083 be made. */
8085 static tree
8086 fold_builtin_bitop (tree fndecl, tree arg)
8088 if (!validate_arg (arg, INTEGER_TYPE))
8089 return NULL_TREE;
8091 /* Optimize for constant argument. */
8092 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8094 tree type = TREE_TYPE (arg);
8095 int result;
8097 switch (DECL_FUNCTION_CODE (fndecl))
8099 CASE_INT_FN (BUILT_IN_FFS):
8100 result = wi::ffs (arg);
8101 break;
/* For clz/ctz of zero the target macro may supply a defined value (the
   macro stores it into RESULT when it returns nonzero); otherwise fall
   back to the type precision.  */
8103 CASE_INT_FN (BUILT_IN_CLZ):
8104 if (wi::ne_p (arg, 0))
8105 result = wi::clz (arg);
8106 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8107 result = TYPE_PRECISION (type);
8108 break;
8110 CASE_INT_FN (BUILT_IN_CTZ):
8111 if (wi::ne_p (arg, 0))
8112 result = wi::ctz (arg);
8113 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8114 result = TYPE_PRECISION (type);
8115 break;
8117 CASE_INT_FN (BUILT_IN_CLRSB):
8118 result = wi::clrsb (arg);
8119 break;
8121 CASE_INT_FN (BUILT_IN_POPCOUNT):
8122 result = wi::popcount (arg);
8123 break;
8125 CASE_INT_FN (BUILT_IN_PARITY):
8126 result = wi::parity (arg);
8127 break;
8129 default:
8130 gcc_unreachable ();
8133 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8136 return NULL_TREE;
/* Fold function call to builtin_bswap and the short, long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP16:
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    /* Widen/truncate ARG to the precision of the return type
	       first, so e.g. bswap16 swaps exactly 16 bits.  */
	    signop sgn = TYPE_SIGN (type);
	    tree result =
	      wide_int_to_tree (type,
				wide_int::from (arg, TYPE_PRECISION (type),
						sgn).bswap ());
	    return result;
	  }
	default:
	  gcc_unreachable ();
	}
    }

  return NULL_TREE;
}
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR logarithm function, used both to
   constant-fold and to identify which log variant we are folding.  */

static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent)) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, SIGNED);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
/* Fold a builtin function call to hypot, hypotf, or hypotl.  ARG0 and
   ARG1 are the call's arguments and TYPE its return type.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (location_t loc, tree fndecl,
		    tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    {
      return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
				  narg1 ? narg1 : arg1);
    }

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
	= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2_loc (loc, MULT_EXPR, type,
			      fold_build1_loc (loc, ABS_EXPR, type, arg0),
			      build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
/* Fold a builtin function call to pow, powf, or powl.  ARG0 and ARG1
   are the call's arguments and TYPE its return type.  Return
   NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, SIGNED);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  pow(0,negative) may set errno/trap, so
	     only fold that case when -fno-trapping-math -fno-errno-math.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      /* An inexact result is only acceptable under
		 -funsafe-math-optimizations.  */
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
/* Fold a builtin function call to powi, powif, or powil with argument ARG.
   ARG0 is the real base, ARG1 the integer exponent, TYPE the return type.
   Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
		   tree arg0, tree arg1, tree type)
{
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (tree_fits_shwi_p (arg1))
    {
      HOST_WIDE_INT c = tree_to_shwi (arg1);

      /* Evaluate powi at compile-time.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && !TREE_OVERFLOW (arg0))
	{
	  REAL_VALUE_TYPE x;
	  x = TREE_REAL_CST (arg0);
	  real_powi (&x, TYPE_MODE (type), &x, c);
	  return build_real (type, x);
	}

      /* Optimize pow(x,0) = 1.0.  */
      if (c == 0)
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1) = x.  */
      if (c == 1)
	return arg0;

      /* Optimize pow(x,-1) = 1.0/x.  */
      if (c == -1)
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);
    }

  return NULL_TREE;
}
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function, used both for
   constant folding and to identify which exp variant this is.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   When SRC has a known constant length LEN, transform the call into
   memcpy (DEST, SRC, LEN + 1) and return DEST + LEN, which is what
   stpcpy is specified to return.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
{
  tree fn, len, lenp1, call, type;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return NULL_TREE;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return NULL_TREE;

  /* LEN + 1 covers the terminating NUL as well.  */
  lenp1 = size_binop_loc (loc, PLUS_EXPR,
			  fold_convert_loc (loc, size_type_node, len),
			  build_int_cst (size_type_node, 1));
  /* We use dest twice in building our expression.  Save it from
     multiple expansions.  */
  dest = builtin_save_expr (dest);
  call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);

  type = TREE_TYPE (TREE_TYPE (fndecl));
  dest = fold_build_pointer_plus_loc (loc, dest, len);
  dest = fold_convert_loc (loc, type, dest);
  /* Evaluate the memcpy call for its side effect, yield DEST + LEN.  */
  dest = omit_one_operand_loc (loc, type, dest, call);
  return dest;
}
/* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
   arguments to the call, and TYPE is its return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, INTEGER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (arg2) != INTEGER_CST
	  || !tree_fits_uhwi_p (len))
	return NULL_TREE;

      /* Only fold when ARG1 is a string constant and LEN does not read
	 past its terminating NUL.  */
      p1 = c_getstr (arg1);
      if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
	{
	  char c;
	  const char *r;
	  tree tem;

	  /* Convert ARG2 to a host char; bail out if it does not fit.  */
	  if (target_char_cast (arg2, &c))
	    return NULL_TREE;

	  r = (const char *) memchr (p1, c, tree_to_uhwi (len));

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (arg1), 0);

	  tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  The
     host memcmp result is canonicalized to -1/0/1.  */
  if (tree_fits_uhwi_p (len) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_to_uhwi (len));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both arguments are string constants: evaluate at compile time,
     canonicalizing the host strcmp result to -1/0/1.  */
  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings and the length constant: evaluate at compile time,
     canonicalizing the host strncmp result to -1/0/1.  */
  if (tree_fits_uhwi_p (len) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_to_uhwi (len));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
/* Fold function call to builtin signbit, signbitf or signbitl with argument
   ARG.  TYPE is the call's return type.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_signbit (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;

      c = TREE_REAL_CST (arg);
      return (REAL_VALUE_NEGATIVE (c)
	      ? build_one_cst (type)
	      : build_zero_cst (type));
    }

  /* If ARG is non-negative, the result is always zero.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0".  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
    return fold_convert (type,
			 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
					  build_real (TREE_TYPE (arg), dconst0)));

  return NULL_TREE;
}
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  TYPE is the call's return type.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
		       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
				 fold_build1_loc (loc, ABS_EXPR, type, arg1),
				 arg2);

  /* Strip sign changing operations for the first argument, since
     copysign imposes ARG2's sign anyway.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
8942 /* Fold a call to builtin isascii with argument ARG. */
8944 static tree
8945 fold_builtin_isascii (location_t loc, tree arg)
8947 if (!validate_arg (arg, INTEGER_TYPE))
8948 return NULL_TREE;
8949 else
8951 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8952 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8953 build_int_cst (integer_type_node,
8954 ~ (unsigned HOST_WIDE_INT) 0x7f));
8955 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8956 arg, integer_zero_node);
8960 /* Fold a call to builtin toascii with argument ARG. */
8962 static tree
8963 fold_builtin_toascii (location_t loc, tree arg)
8965 if (!validate_arg (arg, INTEGER_TYPE))
8966 return NULL_TREE;
8968 /* Transform toascii(c) -> (c & 0x7f). */
8969 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8970 build_int_cst (integer_type_node, 0x7f));
8973 /* Fold a call to builtin isdigit with argument ARG. */
8975 static tree
8976 fold_builtin_isdigit (location_t loc, tree arg)
8978 if (!validate_arg (arg, INTEGER_TYPE))
8979 return NULL_TREE;
8980 else
8982 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8983 /* According to the C standard, isdigit is unaffected by locale.
8984 However, it definitely is affected by the target character set. */
8985 unsigned HOST_WIDE_INT target_digit0
8986 = lang_hooks.to_target_charset ('0');
8988 if (target_digit0 == 0)
8989 return NULL_TREE;
8991 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8992 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8993 build_int_cst (unsigned_type_node, target_digit0));
8994 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8995 build_int_cst (unsigned_type_node, 9));
8999 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9001 static tree
9002 fold_builtin_fabs (location_t loc, tree arg, tree type)
9004 if (!validate_arg (arg, REAL_TYPE))
9005 return NULL_TREE;
9007 arg = fold_convert_loc (loc, type, arg);
9008 if (TREE_CODE (arg) == REAL_CST)
9009 return fold_abs_const (arg, type);
9010 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9013 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9015 static tree
9016 fold_builtin_abs (location_t loc, tree arg, tree type)
9018 if (!validate_arg (arg, INTEGER_TYPE))
9019 return NULL_TREE;
9021 arg = fold_convert_loc (loc, type, arg);
9022 if (TREE_CODE (arg) == INTEGER_CST)
9023 return fold_abs_const (arg, type);
9024 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9027 /* Fold a fma operation with arguments ARG[012]. */
9029 tree
9030 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9031 tree type, tree arg0, tree arg1, tree arg2)
9033 if (TREE_CODE (arg0) == REAL_CST
9034 && TREE_CODE (arg1) == REAL_CST
9035 && TREE_CODE (arg2) == REAL_CST)
9036 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9038 return NULL_TREE;
9041 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9043 static tree
9044 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9046 if (validate_arg (arg0, REAL_TYPE)
9047 && validate_arg (arg1, REAL_TYPE)
9048 && validate_arg (arg2, REAL_TYPE))
9050 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9051 if (tem)
9052 return tem;
9054 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9055 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9056 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9058 return NULL_TREE;
/* Fold a call to builtin fmin or fmax.  MAX selects fmax (true) vs
   fmin (false).  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
9106 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9108 static tree
9109 fold_builtin_carg (location_t loc, tree arg, tree type)
9111 if (validate_arg (arg, COMPLEX_TYPE)
9112 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9114 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9116 if (atan2_fn)
9118 tree new_arg = builtin_save_expr (arg);
9119 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9120 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9121 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9125 return NULL_TREE;
/* Fold a call to builtin logb/ilogb.  RETTYPE distinguishes the two:
   a REAL_TYPE return means logb, otherwise ilogb.  Return NULL_TREE
   if no simplification can be made.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    {
	      /* For logb(-Inf) we have to return +Inf.  */
	      if (real_isinf (value) && real_isneg (value))
		{
		  REAL_VALUE_TYPE tem;
		  real_inf (&tem);
		  return build_real (rettype, tem);
		}
	      return fold_convert_loc (loc, rettype, arg);
	    }
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin significand, if radix == 2.  RETTYPE is the
   call's return type.  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
9218 /* Fold a call to builtin frexp, we can assume the base is 2. */
9220 static tree
9221 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9223 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9224 return NULL_TREE;
9226 STRIP_NOPS (arg0);
9228 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9229 return NULL_TREE;
9231 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9233 /* Proceed if a valid pointer type was passed in. */
9234 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9236 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9237 tree frac, exp;
9239 switch (value->cl)
9241 case rvc_zero:
9242 /* For +-0, return (*exp = 0, +-0). */
9243 exp = integer_zero_node;
9244 frac = arg0;
9245 break;
9246 case rvc_nan:
9247 case rvc_inf:
9248 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9249 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9250 case rvc_normal:
9252 /* Since the frexp function always expects base 2, and in
9253 GCC normalized significands are already in the range
9254 [0.5, 1.0), we have exactly what frexp wants. */
9255 REAL_VALUE_TYPE frac_rvt = *value;
9256 SET_REAL_EXP (&frac_rvt, 0);
9257 frac = build_real (rettype, frac_rvt);
9258 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9260 break;
9261 default:
9262 gcc_unreachable ();
9265 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9266 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9267 TREE_SIDE_EFFECTS (arg1) = 1;
9268 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9271 return NULL_TREE;
9274 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9275 then we can assume the base is two. If it's false, then we have to
9276 check the mode of the TYPE parameter in certain cases. */
9278 static tree
9279 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9280 tree type, bool ldexp)
9282 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9284 STRIP_NOPS (arg0);
9285 STRIP_NOPS (arg1);
9287 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9288 if (real_zerop (arg0) || integer_zerop (arg1)
9289 || (TREE_CODE (arg0) == REAL_CST
9290 && !real_isfinite (&TREE_REAL_CST (arg0))))
9291 return omit_one_operand_loc (loc, type, arg0, arg1);
9293 /* If both arguments are constant, then try to evaluate it. */
9294 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9295 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9296 && tree_fits_shwi_p (arg1))
9298 /* Bound the maximum adjustment to twice the range of the
9299 mode's valid exponents. Use abs to ensure the range is
9300 positive as a sanity check. */
9301 const long max_exp_adj = 2 *
9302 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9303 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9305 /* Get the user-requested adjustment. */
9306 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9308 /* The requested adjustment must be inside this range. This
9309 is a preliminary cap to avoid things like overflow, we
9310 may still fail to compute the result for other reasons. */
9311 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9313 REAL_VALUE_TYPE initial_result;
9315 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9317 /* Ensure we didn't overflow. */
9318 if (! real_isinf (&initial_result))
9320 const REAL_VALUE_TYPE trunc_result
9321 = real_value_truncate (TYPE_MODE (type), initial_result);
9323 /* Only proceed if the target mode can hold the
9324 resulting value. */
9325 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9326 return build_real (type, trunc_result);
9332 return NULL_TREE;
9335 /* Fold a call to builtin modf. */
9337 static tree
9338 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9340 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9341 return NULL_TREE;
9343 STRIP_NOPS (arg0);
9345 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9346 return NULL_TREE;
9348 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9350 /* Proceed if a valid pointer type was passed in. */
9351 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9353 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9354 REAL_VALUE_TYPE trunc, frac;
9356 switch (value->cl)
9358 case rvc_nan:
9359 case rvc_zero:
9360 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9361 trunc = frac = *value;
9362 break;
9363 case rvc_inf:
9364 /* For +-Inf, return (*arg1 = arg0, +-0). */
9365 frac = dconst0;
9366 frac.sign = value->sign;
9367 trunc = *value;
9368 break;
9369 case rvc_normal:
9370 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9371 real_trunc (&trunc, VOIDmode, value);
9372 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9373 /* If the original number was negative and already
9374 integral, then the fractional part is -0.0. */
9375 if (value->sign && frac.cl == rvc_zero)
9376 frac.sign = value->sign;
9377 break;
9380 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9381 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9382 build_real (rettype, trunc));
9383 TREE_SIDE_EFFECTS (arg1) = 1;
9384 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9385 build_real (rettype, frac));
9388 return NULL_TREE;
9391 /* Given a location LOC, an interclass builtin function decl FNDECL
9392 and its single argument ARG, return an folded expression computing
9393 the same, or NULL_TREE if we either couldn't or didn't want to fold
9394 (the latter happen if there's an RTL instruction available). */
9396 static tree
9397 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9399 enum machine_mode mode;
9401 if (!validate_arg (arg, REAL_TYPE))
9402 return NULL_TREE;
9404 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9405 return NULL_TREE;
9407 mode = TYPE_MODE (TREE_TYPE (arg));
9409 /* If there is no optab, try generic code. */
9410 switch (DECL_FUNCTION_CODE (fndecl))
9412 tree result;
9414 CASE_FLT_FN (BUILT_IN_ISINF):
9416 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9417 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9418 tree const type = TREE_TYPE (arg);
9419 REAL_VALUE_TYPE r;
9420 char buf[128];
9422 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9423 real_from_string (&r, buf);
9424 result = build_call_expr (isgr_fn, 2,
9425 fold_build1_loc (loc, ABS_EXPR, type, arg),
9426 build_real (type, r));
9427 return result;
9429 CASE_FLT_FN (BUILT_IN_FINITE):
9430 case BUILT_IN_ISFINITE:
9432 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9433 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9434 tree const type = TREE_TYPE (arg);
9435 REAL_VALUE_TYPE r;
9436 char buf[128];
9438 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9439 real_from_string (&r, buf);
9440 result = build_call_expr (isle_fn, 2,
9441 fold_build1_loc (loc, ABS_EXPR, type, arg),
9442 build_real (type, r));
9443 /*result = fold_build2_loc (loc, UNGT_EXPR,
9444 TREE_TYPE (TREE_TYPE (fndecl)),
9445 fold_build1_loc (loc, ABS_EXPR, type, arg),
9446 build_real (type, r));
9447 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9448 TREE_TYPE (TREE_TYPE (fndecl)),
9449 result);*/
9450 return result;
9452 case BUILT_IN_ISNORMAL:
9454 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9455 islessequal(fabs(x),DBL_MAX). */
9456 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9457 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9458 tree const type = TREE_TYPE (arg);
9459 REAL_VALUE_TYPE rmax, rmin;
9460 char buf[128];
9462 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9463 real_from_string (&rmax, buf);
9464 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9465 real_from_string (&rmin, buf);
9466 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9467 result = build_call_expr (isle_fn, 2, arg,
9468 build_real (type, rmax));
9469 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9470 build_call_expr (isge_fn, 2, arg,
9471 build_real (type, rmin)));
9472 return result;
9474 default:
9475 break;
9478 return NULL_TREE;
9481 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9482 ARG is the argument for the call. */
/* BUILTIN_INDEX selects the classification to fold (BUILT_IN_ISINF,
   BUILT_IN_ISINF_SIGN, BUILT_IN_ISFINITE or BUILT_IN_ISNAN).
   Returns NULL_TREE when no folding is possible.  */
9484 static tree
9485 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9487 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9488 REAL_VALUE_TYPE r;
9490 if (!validate_arg (arg, REAL_TYPE))
9491 return NULL_TREE;
9493 switch (builtin_index)
/* isinf(x): 0 when the mode has no infinities; for a constant Inf
   argument fold to +1 or -1 according to its sign.  */
9495 case BUILT_IN_ISINF:
9496 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9497 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9499 if (TREE_CODE (arg) == REAL_CST)
9501 r = TREE_REAL_CST (arg);
9502 if (real_isinf (&r))
9503 return real_compare (GT_EXPR, &r, &dconst0)
9504 ? integer_one_node : integer_minus_one_node;
9505 else
9506 return integer_zero_node;
9509 return NULL_TREE;
9511 case BUILT_IN_ISINF_SIGN:
9513 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9514 /* In a boolean context, GCC will fold the inner COND_EXPR to
9515 1. So e.g. "if (isinf_sign(x))" would be folded to just
9516 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9517 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9518 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9519 tree tmp = NULL_TREE;
/* ARG is used by both the signbit and isinf calls; save it once.  */
9521 arg = builtin_save_expr (arg);
9523 if (signbit_fn && isinf_fn)
9525 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9526 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
/* Normalize both calls to 0/1 before combining them.  */
9528 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9529 signbit_call, integer_zero_node);
9530 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9531 isinf_call, integer_zero_node);
9533 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9534 integer_minus_one_node, integer_one_node);
9535 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9536 isinf_call, tmp,
9537 integer_zero_node);
9540 return tmp;
/* isfinite(x): trivially 1 when the mode has neither NaNs nor
   infinities; fold a constant argument directly.  */
9543 case BUILT_IN_ISFINITE:
9544 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9545 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9546 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9548 if (TREE_CODE (arg) == REAL_CST)
9550 r = TREE_REAL_CST (arg);
9551 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9554 return NULL_TREE;
/* isnan(x): fold constants; otherwise use x UNORDERED x, which is
   true exactly when x is a NaN.  */
9556 case BUILT_IN_ISNAN:
9557 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9558 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9560 if (TREE_CODE (arg) == REAL_CST)
9562 r = TREE_REAL_CST (arg);
9563 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9566 arg = builtin_save_expr (arg);
9567 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9569 default:
9570 gcc_unreachable ();
9574 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9575 This builtin will generate code to return the appropriate floating
9576 point classification depending on the value of the floating point
9577 number passed in. The possible return values must be supplied as
9578 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9579 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9580 one floating point argument which is "type generic". */
9582 static tree
9583 fold_builtin_fpclassify (location_t loc, tree exp)
9585 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9586 arg, type, res, tmp;
9587 enum machine_mode mode;
9588 REAL_VALUE_TYPE r;
9589 char buf[128];
9591 /* Verify the required arguments in the original call. */
9592 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9593 INTEGER_TYPE, INTEGER_TYPE,
9594 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9595 return NULL_TREE;
9597 fp_nan = CALL_EXPR_ARG (exp, 0);
9598 fp_infinite = CALL_EXPR_ARG (exp, 1);
9599 fp_normal = CALL_EXPR_ARG (exp, 2);
9600 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9601 fp_zero = CALL_EXPR_ARG (exp, 4);
9602 arg = CALL_EXPR_ARG (exp, 5);
9603 type = TREE_TYPE (arg);
9604 mode = TYPE_MODE (type);
/* From here on ARG holds fabs(x), saved so it can be compared
   several times without re-evaluating x.  */
9605 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9607 /* fpclassify(x) ->
9608 isnan(x) ? FP_NAN :
9609 (fabs(x) == Inf ? FP_INFINITE :
9610 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9611 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
/* Innermost test: zero vs. subnormal.  */
9613 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9614 build_real (type, dconst0));
9615 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9616 tmp, fp_zero, fp_subnormal);
/* Smallest normal value of the mode is 2**(emin-1).  */
9618 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9619 real_from_string (&r, buf);
9620 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9621 arg, build_real (type, r));
9622 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
/* Only test for Inf/NaN when the mode honors them.  */
9624 if (HONOR_INFINITIES (mode))
9626 real_inf (&r);
9627 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9628 build_real (type, r));
9629 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9630 fp_infinite, res);
9633 if (HONOR_NANS (mode))
9635 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9636 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9639 return res;
9642 /* Fold a call to an unordered comparison function such as
9643 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9644 being called and ARG0 and ARG1 are the arguments for the call.
9645 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9646 the opposite of the desired result. UNORDERED_CODE is used
9647 for modes that can hold NaNs and ORDERED_CODE is used for
9648 the rest. */
9650 static tree
9651 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9652 enum tree_code unordered_code,
9653 enum tree_code ordered_code)
9655 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9656 enum tree_code code;
9657 tree type0, type1;
9658 enum tree_code code0, code1;
9659 tree cmp_type = NULL_TREE;
9661 type0 = TREE_TYPE (arg0);
9662 type1 = TREE_TYPE (arg1);
9664 code0 = TREE_CODE (type0);
9665 code1 = TREE_CODE (type1);
9667 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9668 /* Choose the wider of two real types. */
9669 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9670 ? type0 : type1;
9671 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9672 cmp_type = type0;
9673 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9674 cmp_type = type1;
9676 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9677 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9679 if (unordered_code == UNORDERED_EXPR)
9681 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9682 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9683 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9686 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9687 : ordered_code;
9688 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9689 fold_build2_loc (loc, code, type, arg0, arg1));
9692 /* Fold a call to built-in function FNDECL with 0 arguments.
9693 IGNORE is true if the result of the function call is ignored. This
9694 function returns NULL_TREE if no simplification was possible. */
9696 static tree
9697 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9699 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9700 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9701 switch (fcode)
/* inf/huge_val builtins fold to a constant infinity; the bool
   argument distinguishes INF (warns if unsupported) from HUGE_VAL.  */
9703 CASE_FLT_FN (BUILT_IN_INF):
9704 case BUILT_IN_INFD32:
9705 case BUILT_IN_INFD64:
9706 case BUILT_IN_INFD128:
9707 return fold_builtin_inf (loc, type, true);
9709 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9710 return fold_builtin_inf (loc, type, false);
9712 case BUILT_IN_CLASSIFY_TYPE:
9713 return fold_builtin_classify_type (NULL_TREE);
/* With -fsanitize=unreachable, replace __builtin_unreachable by a
   ubsan diagnostic call unless the function opts out.  */
9715 case BUILT_IN_UNREACHABLE:
9716 if (flag_sanitize & SANITIZE_UNREACHABLE
9717 && (current_function_decl == NULL
9718 || !lookup_attribute ("no_sanitize_undefined",
9719 DECL_ATTRIBUTES (current_function_decl))))
9720 return ubsan_instrument_unreachable (loc);
9721 break;
9723 default:
9724 break;
9726 return NULL_TREE;
9729 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9730 IGNORE is true if the result of the function call is ignored. This
9731 function returns NULL_TREE if no simplification was possible. */
9733 static tree
9734 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9736 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9737 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9738 switch (fcode)
9740 case BUILT_IN_CONSTANT_P:
9742 tree val = fold_builtin_constant_p (arg0);
9744 /* Gimplification will pull the CALL_EXPR for the builtin out of
9745 an if condition. When not optimizing, we'll not CSE it back.
9746 To avoid link error types of regressions, return false now. */
9747 if (!val && !optimize)
9748 val = integer_zero_node;
9750 return val;
9753 case BUILT_IN_CLASSIFY_TYPE:
9754 return fold_builtin_classify_type (arg0);
9756 case BUILT_IN_STRLEN:
9757 return fold_builtin_strlen (loc, type, arg0);
9759 CASE_FLT_FN (BUILT_IN_FABS):
9760 case BUILT_IN_FABSD32:
9761 case BUILT_IN_FABSD64:
9762 case BUILT_IN_FABSD128:
9763 return fold_builtin_fabs (loc, arg0, type);
9765 case BUILT_IN_ABS:
9766 case BUILT_IN_LABS:
9767 case BUILT_IN_LLABS:
9768 case BUILT_IN_IMAXABS:
9769 return fold_builtin_abs (loc, arg0, type);
/* Complex builtins: each folds only when ARG0 is a COMPLEX_TYPE
   whose element type is REAL_TYPE; constants go through MPC.  */
9771 CASE_FLT_FN (BUILT_IN_CONJ):
9772 if (validate_arg (arg0, COMPLEX_TYPE)
9773 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9774 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9775 break;
9777 CASE_FLT_FN (BUILT_IN_CREAL):
9778 if (validate_arg (arg0, COMPLEX_TYPE)
9779 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
/* NOTE(review): the trailing ";;" below is a stray but harmless
   empty statement.  */
9780 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9781 break;
9783 CASE_FLT_FN (BUILT_IN_CIMAG):
9784 if (validate_arg (arg0, COMPLEX_TYPE)
9785 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9786 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9787 break;
9789 CASE_FLT_FN (BUILT_IN_CCOS):
9790 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9792 CASE_FLT_FN (BUILT_IN_CCOSH):
9793 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9795 CASE_FLT_FN (BUILT_IN_CPROJ):
9796 return fold_builtin_cproj (loc, arg0, type);
9798 CASE_FLT_FN (BUILT_IN_CSIN):
9799 if (validate_arg (arg0, COMPLEX_TYPE)
9800 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9801 return do_mpc_arg1 (arg0, type, mpc_sin);
9802 break;
9804 CASE_FLT_FN (BUILT_IN_CSINH):
9805 if (validate_arg (arg0, COMPLEX_TYPE)
9806 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9807 return do_mpc_arg1 (arg0, type, mpc_sinh);
9808 break;
9810 CASE_FLT_FN (BUILT_IN_CTAN):
9811 if (validate_arg (arg0, COMPLEX_TYPE)
9812 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9813 return do_mpc_arg1 (arg0, type, mpc_tan);
9814 break;
9816 CASE_FLT_FN (BUILT_IN_CTANH):
9817 if (validate_arg (arg0, COMPLEX_TYPE)
9818 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9819 return do_mpc_arg1 (arg0, type, mpc_tanh);
9820 break;
9822 CASE_FLT_FN (BUILT_IN_CLOG):
9823 if (validate_arg (arg0, COMPLEX_TYPE)
9824 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9825 return do_mpc_arg1 (arg0, type, mpc_log);
9826 break;
9828 CASE_FLT_FN (BUILT_IN_CSQRT):
9829 if (validate_arg (arg0, COMPLEX_TYPE)
9830 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9831 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9832 break;
9834 CASE_FLT_FN (BUILT_IN_CASIN):
9835 if (validate_arg (arg0, COMPLEX_TYPE)
9836 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9837 return do_mpc_arg1 (arg0, type, mpc_asin);
9838 break;
9840 CASE_FLT_FN (BUILT_IN_CACOS):
9841 if (validate_arg (arg0, COMPLEX_TYPE)
9842 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9843 return do_mpc_arg1 (arg0, type, mpc_acos);
9844 break;
9846 CASE_FLT_FN (BUILT_IN_CATAN):
9847 if (validate_arg (arg0, COMPLEX_TYPE)
9848 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9849 return do_mpc_arg1 (arg0, type, mpc_atan);
9850 break;
9852 CASE_FLT_FN (BUILT_IN_CASINH):
9853 if (validate_arg (arg0, COMPLEX_TYPE)
9854 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9855 return do_mpc_arg1 (arg0, type, mpc_asinh);
9856 break;
9858 CASE_FLT_FN (BUILT_IN_CACOSH):
9859 if (validate_arg (arg0, COMPLEX_TYPE)
9860 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9861 return do_mpc_arg1 (arg0, type, mpc_acosh);
9862 break;
9864 CASE_FLT_FN (BUILT_IN_CATANH):
9865 if (validate_arg (arg0, COMPLEX_TYPE)
9866 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9867 return do_mpc_arg1 (arg0, type, mpc_atanh);
9868 break;
9870 CASE_FLT_FN (BUILT_IN_CABS):
9871 return fold_builtin_cabs (loc, arg0, type, fndecl);
9873 CASE_FLT_FN (BUILT_IN_CARG):
9874 return fold_builtin_carg (loc, arg0, type);
9876 CASE_FLT_FN (BUILT_IN_SQRT):
9877 return fold_builtin_sqrt (loc, arg0, type);
9879 CASE_FLT_FN (BUILT_IN_CBRT):
9880 return fold_builtin_cbrt (loc, arg0, type);
/* Real math functions: constant-fold through MPFR when ARG0 is a
   REAL_TYPE constant inside the function's domain (the dconst
   bounds passed to do_mpfr_arg1).  */
9882 CASE_FLT_FN (BUILT_IN_ASIN):
9883 if (validate_arg (arg0, REAL_TYPE))
9884 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9885 &dconstm1, &dconst1, true);
9886 break;
9888 CASE_FLT_FN (BUILT_IN_ACOS):
9889 if (validate_arg (arg0, REAL_TYPE))
9890 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9891 &dconstm1, &dconst1, true);
9892 break;
9894 CASE_FLT_FN (BUILT_IN_ATAN):
9895 if (validate_arg (arg0, REAL_TYPE))
9896 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9897 break;
9899 CASE_FLT_FN (BUILT_IN_ASINH):
9900 if (validate_arg (arg0, REAL_TYPE))
9901 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9902 break;
9904 CASE_FLT_FN (BUILT_IN_ACOSH):
9905 if (validate_arg (arg0, REAL_TYPE))
9906 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9907 &dconst1, NULL, true);
9908 break;
9910 CASE_FLT_FN (BUILT_IN_ATANH):
9911 if (validate_arg (arg0, REAL_TYPE))
9912 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9913 &dconstm1, &dconst1, false);
9914 break;
9916 CASE_FLT_FN (BUILT_IN_SIN):
9917 if (validate_arg (arg0, REAL_TYPE))
9918 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9919 break;
9921 CASE_FLT_FN (BUILT_IN_COS):
9922 return fold_builtin_cos (loc, arg0, type, fndecl);
9924 CASE_FLT_FN (BUILT_IN_TAN):
9925 return fold_builtin_tan (arg0, type);
9927 CASE_FLT_FN (BUILT_IN_CEXP):
9928 return fold_builtin_cexp (loc, arg0, type);
9930 CASE_FLT_FN (BUILT_IN_CEXPI):
9931 if (validate_arg (arg0, REAL_TYPE))
9932 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9933 break;
9935 CASE_FLT_FN (BUILT_IN_SINH):
9936 if (validate_arg (arg0, REAL_TYPE))
9937 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9938 break;
9940 CASE_FLT_FN (BUILT_IN_COSH):
9941 return fold_builtin_cosh (loc, arg0, type, fndecl);
9943 CASE_FLT_FN (BUILT_IN_TANH):
9944 if (validate_arg (arg0, REAL_TYPE))
9945 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9946 break;
9948 CASE_FLT_FN (BUILT_IN_ERF):
9949 if (validate_arg (arg0, REAL_TYPE))
9950 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9951 break;
9953 CASE_FLT_FN (BUILT_IN_ERFC):
9954 if (validate_arg (arg0, REAL_TYPE))
9955 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9956 break;
9958 CASE_FLT_FN (BUILT_IN_TGAMMA):
9959 if (validate_arg (arg0, REAL_TYPE))
9960 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9961 break;
9963 CASE_FLT_FN (BUILT_IN_EXP):
9964 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9966 CASE_FLT_FN (BUILT_IN_EXP2):
9967 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9969 CASE_FLT_FN (BUILT_IN_EXP10):
9970 CASE_FLT_FN (BUILT_IN_POW10):
9971 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9973 CASE_FLT_FN (BUILT_IN_EXPM1):
9974 if (validate_arg (arg0, REAL_TYPE))
9975 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9976 break;
9978 CASE_FLT_FN (BUILT_IN_LOG):
9979 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9981 CASE_FLT_FN (BUILT_IN_LOG2):
9982 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9984 CASE_FLT_FN (BUILT_IN_LOG10):
9985 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9987 CASE_FLT_FN (BUILT_IN_LOG1P):
9988 if (validate_arg (arg0, REAL_TYPE))
9989 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9990 &dconstm1, NULL, false);
9991 break;
9993 CASE_FLT_FN (BUILT_IN_J0):
9994 if (validate_arg (arg0, REAL_TYPE))
9995 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9996 NULL, NULL, 0);
9997 break;
9999 CASE_FLT_FN (BUILT_IN_J1):
10000 if (validate_arg (arg0, REAL_TYPE))
10001 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10002 NULL, NULL, 0);
10003 break;
10005 CASE_FLT_FN (BUILT_IN_Y0):
10006 if (validate_arg (arg0, REAL_TYPE))
10007 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10008 &dconst0, NULL, false);
10009 break;
10011 CASE_FLT_FN (BUILT_IN_Y1):
10012 if (validate_arg (arg0, REAL_TYPE))
10013 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10014 &dconst0, NULL, false);
10015 break;
10017 CASE_FLT_FN (BUILT_IN_NAN):
10018 case BUILT_IN_NAND32:
10019 case BUILT_IN_NAND64:
10020 case BUILT_IN_NAND128:
10021 return fold_builtin_nan (arg0, type, true);
10023 CASE_FLT_FN (BUILT_IN_NANS):
10024 return fold_builtin_nan (arg0, type, false);
/* Rounding and conversion-to-integer builtins.  */
10026 CASE_FLT_FN (BUILT_IN_FLOOR):
10027 return fold_builtin_floor (loc, fndecl, arg0);
10029 CASE_FLT_FN (BUILT_IN_CEIL):
10030 return fold_builtin_ceil (loc, fndecl, arg0);
10032 CASE_FLT_FN (BUILT_IN_TRUNC):
10033 return fold_builtin_trunc (loc, fndecl, arg0);
10035 CASE_FLT_FN (BUILT_IN_ROUND):
10036 return fold_builtin_round (loc, fndecl, arg0);
10038 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10039 CASE_FLT_FN (BUILT_IN_RINT):
10040 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10042 CASE_FLT_FN (BUILT_IN_ICEIL):
10043 CASE_FLT_FN (BUILT_IN_LCEIL):
10044 CASE_FLT_FN (BUILT_IN_LLCEIL):
10045 CASE_FLT_FN (BUILT_IN_LFLOOR):
10046 CASE_FLT_FN (BUILT_IN_IFLOOR):
10047 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10048 CASE_FLT_FN (BUILT_IN_IROUND):
10049 CASE_FLT_FN (BUILT_IN_LROUND):
10050 CASE_FLT_FN (BUILT_IN_LLROUND):
10051 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10053 CASE_FLT_FN (BUILT_IN_IRINT):
10054 CASE_FLT_FN (BUILT_IN_LRINT):
10055 CASE_FLT_FN (BUILT_IN_LLRINT):
10056 return fold_fixed_mathfn (loc, fndecl, arg0);
/* Bit-manipulation builtins.  */
10058 case BUILT_IN_BSWAP16:
10059 case BUILT_IN_BSWAP32:
10060 case BUILT_IN_BSWAP64:
10061 return fold_builtin_bswap (fndecl, arg0);
10063 CASE_INT_FN (BUILT_IN_FFS):
10064 CASE_INT_FN (BUILT_IN_CLZ):
10065 CASE_INT_FN (BUILT_IN_CTZ):
10066 CASE_INT_FN (BUILT_IN_CLRSB):
10067 CASE_INT_FN (BUILT_IN_POPCOUNT):
10068 CASE_INT_FN (BUILT_IN_PARITY):
10069 return fold_builtin_bitop (fndecl, arg0);
10071 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10072 return fold_builtin_signbit (loc, arg0, type);
10074 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10075 return fold_builtin_significand (loc, arg0, type);
10077 CASE_FLT_FN (BUILT_IN_ILOGB):
10078 CASE_FLT_FN (BUILT_IN_LOGB):
10079 return fold_builtin_logb (loc, arg0, type);
10081 case BUILT_IN_ISASCII:
10082 return fold_builtin_isascii (loc, arg0);
10084 case BUILT_IN_TOASCII:
10085 return fold_builtin_toascii (loc, arg0);
10087 case BUILT_IN_ISDIGIT:
10088 return fold_builtin_isdigit (loc, arg0);
/* Classification builtins: try constant folding first, then the
   generic interclass expansion.  */
10090 CASE_FLT_FN (BUILT_IN_FINITE):
10091 case BUILT_IN_FINITED32:
10092 case BUILT_IN_FINITED64:
10093 case BUILT_IN_FINITED128:
10094 case BUILT_IN_ISFINITE:
10096 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10097 if (ret)
10098 return ret;
10099 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10102 CASE_FLT_FN (BUILT_IN_ISINF):
10103 case BUILT_IN_ISINFD32:
10104 case BUILT_IN_ISINFD64:
10105 case BUILT_IN_ISINFD128:
10107 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10108 if (ret)
10109 return ret;
10110 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10113 case BUILT_IN_ISNORMAL:
10114 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10116 case BUILT_IN_ISINF_SIGN:
10117 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10119 CASE_FLT_FN (BUILT_IN_ISNAN):
10120 case BUILT_IN_ISNAND32:
10121 case BUILT_IN_ISNAND64:
10122 case BUILT_IN_ISNAND128:
10123 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10125 case BUILT_IN_PRINTF:
10126 case BUILT_IN_PRINTF_UNLOCKED:
10127 case BUILT_IN_VPRINTF:
10128 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
/* free(NULL) is a no-op; fold it away entirely.  */
10130 case BUILT_IN_FREE:
10131 if (integer_zerop (arg0))
10132 return build_empty_stmt (loc);
10133 break;
10135 default:
10136 break;
10139 return NULL_TREE;
10143 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10144 IGNORE is true if the result of the function call is ignored. This
10145 function returns NULL_TREE if no simplification was possible. */
10147 static tree
10148 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10150 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10151 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10153 switch (fcode)
10155 CASE_FLT_FN (BUILT_IN_JN):
10156 if (validate_arg (arg0, INTEGER_TYPE)
10157 && validate_arg (arg1, REAL_TYPE))
10158 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10159 break;
10161 CASE_FLT_FN (BUILT_IN_YN):
10162 if (validate_arg (arg0, INTEGER_TYPE)
10163 && validate_arg (arg1, REAL_TYPE))
10164 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10165 &dconst0, false);
10166 break;
10168 CASE_FLT_FN (BUILT_IN_DREM):
10169 CASE_FLT_FN (BUILT_IN_REMAINDER):
10170 if (validate_arg (arg0, REAL_TYPE)
10171 && validate_arg (arg1, REAL_TYPE))
10172 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10173 break;
10175 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10176 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10177 if (validate_arg (arg0, REAL_TYPE)
10178 && validate_arg (arg1, POINTER_TYPE))
10179 return do_mpfr_lgamma_r (arg0, arg1, type);
10180 break;
10182 CASE_FLT_FN (BUILT_IN_ATAN2):
10183 if (validate_arg (arg0, REAL_TYPE)
10184 && validate_arg (arg1, REAL_TYPE))
10185 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10186 break;
10188 CASE_FLT_FN (BUILT_IN_FDIM):
10189 if (validate_arg (arg0, REAL_TYPE)
10190 && validate_arg (arg1, REAL_TYPE))
10191 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10192 break;
10194 CASE_FLT_FN (BUILT_IN_HYPOT):
10195 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10197 CASE_FLT_FN (BUILT_IN_CPOW):
10198 if (validate_arg (arg0, COMPLEX_TYPE)
10199 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10200 && validate_arg (arg1, COMPLEX_TYPE)
10201 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10202 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10203 break;
10205 CASE_FLT_FN (BUILT_IN_LDEXP):
10206 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10207 CASE_FLT_FN (BUILT_IN_SCALBN):
10208 CASE_FLT_FN (BUILT_IN_SCALBLN):
10209 return fold_builtin_load_exponent (loc, arg0, arg1,
10210 type, /*ldexp=*/false);
10212 CASE_FLT_FN (BUILT_IN_FREXP):
10213 return fold_builtin_frexp (loc, arg0, arg1, type);
10215 CASE_FLT_FN (BUILT_IN_MODF):
10216 return fold_builtin_modf (loc, arg0, arg1, type);
10218 case BUILT_IN_STRSTR:
10219 return fold_builtin_strstr (loc, arg0, arg1, type);
10221 case BUILT_IN_STRSPN:
10222 return fold_builtin_strspn (loc, arg0, arg1);
10224 case BUILT_IN_STRCSPN:
10225 return fold_builtin_strcspn (loc, arg0, arg1);
10227 case BUILT_IN_STRCHR:
10228 case BUILT_IN_INDEX:
10229 return fold_builtin_strchr (loc, arg0, arg1, type);
10231 case BUILT_IN_STRRCHR:
10232 case BUILT_IN_RINDEX:
10233 return fold_builtin_strrchr (loc, arg0, arg1, type);
10235 case BUILT_IN_STPCPY:
10236 if (ignore)
10238 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10239 if (!fn)
10240 break;
10242 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10244 else
10245 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10246 break;
10248 case BUILT_IN_STRCMP:
10249 return fold_builtin_strcmp (loc, arg0, arg1);
10251 case BUILT_IN_STRPBRK:
10252 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10254 case BUILT_IN_EXPECT:
10255 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10257 CASE_FLT_FN (BUILT_IN_POW):
10258 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10260 CASE_FLT_FN (BUILT_IN_POWI):
10261 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10263 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10264 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10266 CASE_FLT_FN (BUILT_IN_FMIN):
10267 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10269 CASE_FLT_FN (BUILT_IN_FMAX):
10270 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10272 case BUILT_IN_ISGREATER:
10273 return fold_builtin_unordered_cmp (loc, fndecl,
10274 arg0, arg1, UNLE_EXPR, LE_EXPR);
10275 case BUILT_IN_ISGREATEREQUAL:
10276 return fold_builtin_unordered_cmp (loc, fndecl,
10277 arg0, arg1, UNLT_EXPR, LT_EXPR);
10278 case BUILT_IN_ISLESS:
10279 return fold_builtin_unordered_cmp (loc, fndecl,
10280 arg0, arg1, UNGE_EXPR, GE_EXPR);
10281 case BUILT_IN_ISLESSEQUAL:
10282 return fold_builtin_unordered_cmp (loc, fndecl,
10283 arg0, arg1, UNGT_EXPR, GT_EXPR);
10284 case BUILT_IN_ISLESSGREATER:
10285 return fold_builtin_unordered_cmp (loc, fndecl,
10286 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10287 case BUILT_IN_ISUNORDERED:
10288 return fold_builtin_unordered_cmp (loc, fndecl,
10289 arg0, arg1, UNORDERED_EXPR,
10290 NOP_EXPR);
10292 /* We do the folding for va_start in the expander. */
10293 case BUILT_IN_VA_START:
10294 break;
10296 case BUILT_IN_OBJECT_SIZE:
10297 return fold_builtin_object_size (arg0, arg1);
10299 case BUILT_IN_PRINTF:
10300 case BUILT_IN_PRINTF_UNLOCKED:
10301 case BUILT_IN_VPRINTF:
10302 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10304 case BUILT_IN_PRINTF_CHK:
10305 case BUILT_IN_VPRINTF_CHK:
10306 if (!validate_arg (arg0, INTEGER_TYPE)
10307 || TREE_SIDE_EFFECTS (arg0))
10308 return NULL_TREE;
10309 else
10310 return fold_builtin_printf (loc, fndecl,
10311 arg1, NULL_TREE, ignore, fcode);
10312 break;
10314 case BUILT_IN_FPRINTF:
10315 case BUILT_IN_FPRINTF_UNLOCKED:
10316 case BUILT_IN_VFPRINTF:
10317 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10318 ignore, fcode);
10320 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10321 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10323 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10324 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10326 default:
10327 break;
10329 return NULL_TREE;
10332 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10333 and ARG2. IGNORE is true if the result of the function call is ignored.
10334 This function returns NULL_TREE if no simplification was possible. */
10336 static tree
10337 fold_builtin_3 (location_t loc, tree fndecl,
10338 tree arg0, tree arg1, tree arg2, bool ignore)
10340 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10341 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10342 switch (fcode)
10345 CASE_FLT_FN (BUILT_IN_SINCOS):
10346 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10348 CASE_FLT_FN (BUILT_IN_FMA):
10349 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10350 break;
10352 CASE_FLT_FN (BUILT_IN_REMQUO):
10353 if (validate_arg (arg0, REAL_TYPE)
10354 && validate_arg (arg1, REAL_TYPE)
10355 && validate_arg (arg2, POINTER_TYPE))
10356 return do_mpfr_remquo (arg0, arg1, arg2);
10357 break;
10359 case BUILT_IN_STRNCAT:
10360 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10362 case BUILT_IN_STRNCMP:
10363 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10365 case BUILT_IN_MEMCHR:
10366 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10368 case BUILT_IN_BCMP:
10369 case BUILT_IN_MEMCMP:
10370 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10372 case BUILT_IN_PRINTF_CHK:
10373 case BUILT_IN_VPRINTF_CHK:
10374 if (!validate_arg (arg0, INTEGER_TYPE)
10375 || TREE_SIDE_EFFECTS (arg0))
10376 return NULL_TREE;
10377 else
10378 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10379 break;
10381 case BUILT_IN_FPRINTF:
10382 case BUILT_IN_FPRINTF_UNLOCKED:
10383 case BUILT_IN_VFPRINTF:
10384 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10385 ignore, fcode);
10387 case BUILT_IN_FPRINTF_CHK:
10388 case BUILT_IN_VFPRINTF_CHK:
10389 if (!validate_arg (arg1, INTEGER_TYPE)
10390 || TREE_SIDE_EFFECTS (arg1))
10391 return NULL_TREE;
10392 else
10393 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10394 ignore, fcode);
10396 case BUILT_IN_EXPECT:
10397 return fold_builtin_expect (loc, arg0, arg1, arg2);
10399 default:
10400 break;
10402 return NULL_TREE;
10405 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10406 ARG2, and ARG3. IGNORE is true if the result of the function call is
10407 ignored. This function returns NULL_TREE if no simplification was
10408 possible. */
10410 static tree
10411 fold_builtin_4 (location_t loc, tree fndecl,
10412 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10414 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10416 switch (fcode)
10418 case BUILT_IN_STRNCAT_CHK:
10419 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10421 case BUILT_IN_FPRINTF_CHK:
10422 case BUILT_IN_VFPRINTF_CHK:
10423 if (!validate_arg (arg1, INTEGER_TYPE)
10424 || TREE_SIDE_EFFECTS (arg1))
10425 return NULL_TREE;
10426 else
10427 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10428 ignore, fcode);
10429 break;
10431 default:
10432 break;
10434 return NULL_TREE;
10437 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10438 arguments, where NARGS <= 4. IGNORE is true if the result of the
10439 function call is ignored. This function returns NULL_TREE if no
10440 simplification was possible. Note that this only folds builtins with
10441 fixed argument patterns. Foldings that do varargs-to-varargs
10442 transformations, or that match calls with more than 4 arguments,
10443 need to be handled with fold_builtin_varargs instead. */
10445 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10447 static tree
10448 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10450 tree ret = NULL_TREE;
10452 switch (nargs)
10454 case 0:
10455 ret = fold_builtin_0 (loc, fndecl, ignore);
10456 break;
10457 case 1:
10458 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10459 break;
10460 case 2:
10461 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10462 break;
10463 case 3:
10464 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10465 break;
10466 case 4:
10467 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10468 ignore);
10469 break;
10470 default:
10471 break;
10473 if (ret)
10475 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10476 SET_EXPR_LOCATION (ret, loc);
10477 TREE_NO_WARNING (ret) = 1;
10478 return ret;
10480 return NULL_TREE;
10483 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10484 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10485 of arguments in ARGS to be omitted. OLDNARGS is the number of
10486 elements in ARGS. */
10488 static tree
10489 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10490 int skip, tree fndecl, int n, va_list newargs)
10492 int nargs = oldnargs - skip + n;
10493 tree *buffer;
10495 if (n > 0)
10497 int i, j;
10499 buffer = XALLOCAVEC (tree, nargs);
10500 for (i = 0; i < n; i++)
10501 buffer[i] = va_arg (newargs, tree);
10502 for (j = skip; j < oldnargs; j++, i++)
10503 buffer[i] = args[j];
10505 else
10506 buffer = args + skip;
10508 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10511 /* Return true if FNDECL shouldn't be folded right now.
10512 If a built-in function has an inline attribute always_inline
10513 wrapper, defer folding it after always_inline functions have
10514 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10515 might not be performed. */
10517 bool
10518 avoid_folding_inline_builtin (tree fndecl)
10520 return (DECL_DECLARED_INLINE_P (fndecl)
10521 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10522 && cfun
10523 && !cfun->always_inline_functions_inlined
10524 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10527 /* A wrapper function for builtin folding that prevents warnings for
10528 "statement without effect" and the like, caused by removing the
10529 call node earlier than the warning is generated. */
10531 tree
10532 fold_call_expr (location_t loc, tree exp, bool ignore)
10534 tree ret = NULL_TREE;
10535 tree fndecl = get_callee_fndecl (exp);
10536 if (fndecl
10537 && TREE_CODE (fndecl) == FUNCTION_DECL
10538 && DECL_BUILT_IN (fndecl)
10539 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10540 yet. Defer folding until we see all the arguments
10541 (after inlining). */
10542 && !CALL_EXPR_VA_ARG_PACK (exp))
10544 int nargs = call_expr_nargs (exp);
10546 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10547 instead last argument is __builtin_va_arg_pack (). Defer folding
10548 even in that case, until arguments are finalized. */
10549 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10551 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10552 if (fndecl2
10553 && TREE_CODE (fndecl2) == FUNCTION_DECL
10554 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10555 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10556 return NULL_TREE;
10559 if (avoid_folding_inline_builtin (fndecl))
10560 return NULL_TREE;
10562 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10563 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10564 CALL_EXPR_ARGP (exp), ignore);
10565 else
10567 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10569 tree *args = CALL_EXPR_ARGP (exp);
10570 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10572 if (!ret)
10573 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10574 if (ret)
10575 return ret;
10578 return NULL_TREE;
10581 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10582 N arguments are passed in the array ARGARRAY. */
10584 tree
10585 fold_builtin_call_array (location_t loc, tree type,
10586 tree fn,
10587 int n,
10588 tree *argarray)
10590 tree ret = NULL_TREE;
10591 tree exp;
10593 if (TREE_CODE (fn) == ADDR_EXPR)
10595 tree fndecl = TREE_OPERAND (fn, 0);
10596 if (TREE_CODE (fndecl) == FUNCTION_DECL
10597 && DECL_BUILT_IN (fndecl))
10599 /* If last argument is __builtin_va_arg_pack (), arguments to this
10600 function are not finalized yet. Defer folding until they are. */
10601 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10603 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10604 if (fndecl2
10605 && TREE_CODE (fndecl2) == FUNCTION_DECL
10606 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10607 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10608 return build_call_array_loc (loc, type, fn, n, argarray);
10610 if (avoid_folding_inline_builtin (fndecl))
10611 return build_call_array_loc (loc, type, fn, n, argarray);
10612 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10614 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10615 if (ret)
10616 return ret;
10618 return build_call_array_loc (loc, type, fn, n, argarray);
10620 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10622 /* First try the transformations that don't require consing up
10623 an exp. */
10624 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10625 if (ret)
10626 return ret;
10629 /* If we got this far, we need to build an exp. */
10630 exp = build_call_array_loc (loc, type, fn, n, argarray);
10631 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10632 return ret ? ret : exp;
10636 return build_call_array_loc (loc, type, fn, n, argarray);
10639 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10640 along with N new arguments specified as the "..." parameters. SKIP
10641 is the number of arguments in EXP to be omitted. This function is used
10642 to do varargs-to-varargs transformations. */
10644 static tree
10645 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10647 va_list ap;
10648 tree t;
10650 va_start (ap, n);
10651 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10652 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10653 va_end (ap);
10655 return t;
10658 /* Validate a single argument ARG against a tree code CODE representing
10659 a type. */
10661 static bool
10662 validate_arg (const_tree arg, enum tree_code code)
10664 if (!arg)
10665 return false;
10666 else if (code == POINTER_TYPE)
10667 return POINTER_TYPE_P (TREE_TYPE (arg));
10668 else if (code == INTEGER_TYPE)
10669 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10670 return code == TREE_CODE (TREE_TYPE (arg));
10673 /* This function validates the types of a function call argument list
10674 against a specified list of tree_codes. If the last specifier is a 0,
10675 that represents an ellipses, otherwise the last specifier must be a
10676 VOID_TYPE.
10678 This is the GIMPLE version of validate_arglist. Eventually we want to
10679 completely convert builtins.c to work from GIMPLEs and the tree based
10680 validate_arglist will then be removed. */
10682 bool
10683 validate_gimple_arglist (const_gimple call, ...)
10685 enum tree_code code;
10686 bool res = 0;
10687 va_list ap;
10688 const_tree arg;
10689 size_t i;
10691 va_start (ap, call);
10692 i = 0;
10696 code = (enum tree_code) va_arg (ap, int);
10697 switch (code)
10699 case 0:
10700 /* This signifies an ellipses, any further arguments are all ok. */
10701 res = true;
10702 goto end;
10703 case VOID_TYPE:
10704 /* This signifies an endlink, if no arguments remain, return
10705 true, otherwise return false. */
10706 res = (i == gimple_call_num_args (call));
10707 goto end;
10708 default:
10709 /* If no parameters remain or the parameter's code does not
10710 match the specified code, return false. Otherwise continue
10711 checking any remaining arguments. */
10712 arg = gimple_call_arg (call, i++);
10713 if (!validate_arg (arg, code))
10714 goto end;
10715 break;
10718 while (1);
10720 /* We need gotos here since we can only have one VA_CLOSE in a
10721 function. */
10722 end: ;
10723 va_end (ap);
10725 return res;
10728 /* Default target-specific builtin expander that does nothing. */
10731 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10732 rtx target ATTRIBUTE_UNUSED,
10733 rtx subtarget ATTRIBUTE_UNUSED,
10734 enum machine_mode mode ATTRIBUTE_UNUSED,
10735 int ignore ATTRIBUTE_UNUSED)
10737 return NULL_RTX;
10740 /* Returns true is EXP represents data that would potentially reside
10741 in a readonly section. */
10743 bool
10744 readonly_data_expr (tree exp)
10746 STRIP_NOPS (exp);
10748 if (TREE_CODE (exp) != ADDR_EXPR)
10749 return false;
10751 exp = get_base_address (TREE_OPERAND (exp, 0));
10752 if (!exp)
10753 return false;
10755 /* Make sure we call decl_readonly_section only for trees it
10756 can handle (since it returns true for everything it doesn't
10757 understand). */
10758 if (TREE_CODE (exp) == STRING_CST
10759 || TREE_CODE (exp) == CONSTRUCTOR
10760 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10761 return decl_readonly_section (exp, 0);
10762 else
10763 return false;
10766 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10767 to the call, and TYPE is its return type.
10769 Return NULL_TREE if no simplification was possible, otherwise return the
10770 simplified form of the call as a tree.
10772 The simplified form may be a constant or other expression which
10773 computes the same value, but in a more efficient manner (including
10774 calls to other builtin functions).
10776 The call may contain arguments which need to be evaluated, but
10777 which are not useful to determine the result of the call. In
10778 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10779 COMPOUND_EXPR will be an argument which must be evaluated.
10780 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10781 COMPOUND_EXPR in the chain will contain the tree for the simplified
10782 form of the builtin function call. */
10784 static tree
10785 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10787 if (!validate_arg (s1, POINTER_TYPE)
10788 || !validate_arg (s2, POINTER_TYPE))
10789 return NULL_TREE;
10790 else
10792 tree fn;
10793 const char *p1, *p2;
10795 p2 = c_getstr (s2);
10796 if (p2 == NULL)
10797 return NULL_TREE;
10799 p1 = c_getstr (s1);
10800 if (p1 != NULL)
10802 const char *r = strstr (p1, p2);
10803 tree tem;
10805 if (r == NULL)
10806 return build_int_cst (TREE_TYPE (s1), 0);
10808 /* Return an offset into the constant string argument. */
10809 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10810 return fold_convert_loc (loc, type, tem);
10813 /* The argument is const char *, and the result is char *, so we need
10814 a type conversion here to avoid a warning. */
10815 if (p2[0] == '\0')
10816 return fold_convert_loc (loc, type, s1);
10818 if (p2[1] != '\0')
10819 return NULL_TREE;
10821 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10822 if (!fn)
10823 return NULL_TREE;
10825 /* New argument list transforming strstr(s1, s2) to
10826 strchr(s1, s2[0]). */
10827 return build_call_expr_loc (loc, fn, 2, s1,
10828 build_int_cst (integer_type_node, p2[0]));
10832 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10833 the call, and TYPE is its return type.
10835 Return NULL_TREE if no simplification was possible, otherwise return the
10836 simplified form of the call as a tree.
10838 The simplified form may be a constant or other expression which
10839 computes the same value, but in a more efficient manner (including
10840 calls to other builtin functions).
10842 The call may contain arguments which need to be evaluated, but
10843 which are not useful to determine the result of the call. In
10844 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10845 COMPOUND_EXPR will be an argument which must be evaluated.
10846 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10847 COMPOUND_EXPR in the chain will contain the tree for the simplified
10848 form of the builtin function call. */
10850 static tree
10851 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10853 if (!validate_arg (s1, POINTER_TYPE)
10854 || !validate_arg (s2, INTEGER_TYPE))
10855 return NULL_TREE;
10856 else
10858 const char *p1;
10860 if (TREE_CODE (s2) != INTEGER_CST)
10861 return NULL_TREE;
10863 p1 = c_getstr (s1);
10864 if (p1 != NULL)
10866 char c;
10867 const char *r;
10868 tree tem;
10870 if (target_char_cast (s2, &c))
10871 return NULL_TREE;
10873 r = strchr (p1, c);
10875 if (r == NULL)
10876 return build_int_cst (TREE_TYPE (s1), 0);
10878 /* Return an offset into the constant string argument. */
10879 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10880 return fold_convert_loc (loc, type, tem);
10882 return NULL_TREE;
10886 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10887 the call, and TYPE is its return type.
10889 Return NULL_TREE if no simplification was possible, otherwise return the
10890 simplified form of the call as a tree.
10892 The simplified form may be a constant or other expression which
10893 computes the same value, but in a more efficient manner (including
10894 calls to other builtin functions).
10896 The call may contain arguments which need to be evaluated, but
10897 which are not useful to determine the result of the call. In
10898 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10899 COMPOUND_EXPR will be an argument which must be evaluated.
10900 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10901 COMPOUND_EXPR in the chain will contain the tree for the simplified
10902 form of the builtin function call. */
10904 static tree
10905 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10907 if (!validate_arg (s1, POINTER_TYPE)
10908 || !validate_arg (s2, INTEGER_TYPE))
10909 return NULL_TREE;
10910 else
10912 tree fn;
10913 const char *p1;
10915 if (TREE_CODE (s2) != INTEGER_CST)
10916 return NULL_TREE;
10918 p1 = c_getstr (s1);
10919 if (p1 != NULL)
10921 char c;
10922 const char *r;
10923 tree tem;
10925 if (target_char_cast (s2, &c))
10926 return NULL_TREE;
10928 r = strrchr (p1, c);
10930 if (r == NULL)
10931 return build_int_cst (TREE_TYPE (s1), 0);
10933 /* Return an offset into the constant string argument. */
10934 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10935 return fold_convert_loc (loc, type, tem);
10938 if (! integer_zerop (s2))
10939 return NULL_TREE;
10941 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10942 if (!fn)
10943 return NULL_TREE;
10945 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10946 return build_call_expr_loc (loc, fn, 2, s1, s2);
10950 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10951 to the call, and TYPE is its return type.
10953 Return NULL_TREE if no simplification was possible, otherwise return the
10954 simplified form of the call as a tree.
10956 The simplified form may be a constant or other expression which
10957 computes the same value, but in a more efficient manner (including
10958 calls to other builtin functions).
10960 The call may contain arguments which need to be evaluated, but
10961 which are not useful to determine the result of the call. In
10962 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10963 COMPOUND_EXPR will be an argument which must be evaluated.
10964 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10965 COMPOUND_EXPR in the chain will contain the tree for the simplified
10966 form of the builtin function call. */
10968 static tree
10969 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10971 if (!validate_arg (s1, POINTER_TYPE)
10972 || !validate_arg (s2, POINTER_TYPE))
10973 return NULL_TREE;
10974 else
10976 tree fn;
10977 const char *p1, *p2;
10979 p2 = c_getstr (s2);
10980 if (p2 == NULL)
10981 return NULL_TREE;
10983 p1 = c_getstr (s1);
10984 if (p1 != NULL)
10986 const char *r = strpbrk (p1, p2);
10987 tree tem;
10989 if (r == NULL)
10990 return build_int_cst (TREE_TYPE (s1), 0);
10992 /* Return an offset into the constant string argument. */
10993 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10994 return fold_convert_loc (loc, type, tem);
10997 if (p2[0] == '\0')
10998 /* strpbrk(x, "") == NULL.
10999 Evaluate and ignore s1 in case it had side-effects. */
11000 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11002 if (p2[1] != '\0')
11003 return NULL_TREE; /* Really call strpbrk. */
11005 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11006 if (!fn)
11007 return NULL_TREE;
11009 /* New argument list transforming strpbrk(s1, s2) to
11010 strchr(s1, s2[0]). */
11011 return build_call_expr_loc (loc, fn, 2, s1,
11012 build_int_cst (integer_type_node, p2[0]));
11016 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11017 arguments to the call.
11019 Return NULL_TREE if no simplification was possible, otherwise return the
11020 simplified form of the call as a tree.
11022 The simplified form may be a constant or other expression which
11023 computes the same value, but in a more efficient manner (including
11024 calls to other builtin functions).
11026 The call may contain arguments which need to be evaluated, but
11027 which are not useful to determine the result of the call. In
11028 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11029 COMPOUND_EXPR will be an argument which must be evaluated.
11030 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11031 COMPOUND_EXPR in the chain will contain the tree for the simplified
11032 form of the builtin function call. */
11034 static tree
11035 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11037 if (!validate_arg (dst, POINTER_TYPE)
11038 || !validate_arg (src, POINTER_TYPE)
11039 || !validate_arg (len, INTEGER_TYPE))
11040 return NULL_TREE;
11041 else
11043 const char *p = c_getstr (src);
11045 /* If the requested length is zero, or the src parameter string
11046 length is zero, return the dst parameter. */
11047 if (integer_zerop (len) || (p && *p == '\0'))
11048 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11050 /* If the requested len is greater than or equal to the string
11051 length, call strcat. */
11052 if (TREE_CODE (len) == INTEGER_CST && p
11053 && compare_tree_int (len, strlen (p)) >= 0)
11055 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11057 /* If the replacement _DECL isn't initialized, don't do the
11058 transformation. */
11059 if (!fn)
11060 return NULL_TREE;
11062 return build_call_expr_loc (loc, fn, 2, dst, src);
11064 return NULL_TREE;
11068 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11069 to the call.
11071 Return NULL_TREE if no simplification was possible, otherwise return the
11072 simplified form of the call as a tree.
11074 The simplified form may be a constant or other expression which
11075 computes the same value, but in a more efficient manner (including
11076 calls to other builtin functions).
11078 The call may contain arguments which need to be evaluated, but
11079 which are not useful to determine the result of the call. In
11080 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11081 COMPOUND_EXPR will be an argument which must be evaluated.
11082 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11083 COMPOUND_EXPR in the chain will contain the tree for the simplified
11084 form of the builtin function call. */
11086 static tree
11087 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11089 if (!validate_arg (s1, POINTER_TYPE)
11090 || !validate_arg (s2, POINTER_TYPE))
11091 return NULL_TREE;
11092 else
11094 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11096 /* If both arguments are constants, evaluate at compile-time. */
11097 if (p1 && p2)
11099 const size_t r = strspn (p1, p2);
11100 return build_int_cst (size_type_node, r);
11103 /* If either argument is "", return NULL_TREE. */
11104 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11105 /* Evaluate and ignore both arguments in case either one has
11106 side-effects. */
11107 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11108 s1, s2);
11109 return NULL_TREE;
11113 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11114 to the call.
11116 Return NULL_TREE if no simplification was possible, otherwise return the
11117 simplified form of the call as a tree.
11119 The simplified form may be a constant or other expression which
11120 computes the same value, but in a more efficient manner (including
11121 calls to other builtin functions).
11123 The call may contain arguments which need to be evaluated, but
11124 which are not useful to determine the result of the call. In
11125 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11126 COMPOUND_EXPR will be an argument which must be evaluated.
11127 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11128 COMPOUND_EXPR in the chain will contain the tree for the simplified
11129 form of the builtin function call. */
11131 static tree
11132 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11134 if (!validate_arg (s1, POINTER_TYPE)
11135 || !validate_arg (s2, POINTER_TYPE))
11136 return NULL_TREE;
11137 else
11139 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11141 /* If both arguments are constants, evaluate at compile-time. */
11142 if (p1 && p2)
11144 const size_t r = strcspn (p1, p2);
11145 return build_int_cst (size_type_node, r);
11148 /* If the first argument is "", return NULL_TREE. */
11149 if (p1 && *p1 == '\0')
11151 /* Evaluate and ignore argument s2 in case it has
11152 side-effects. */
11153 return omit_one_operand_loc (loc, size_type_node,
11154 size_zero_node, s2);
11157 /* If the second argument is "", return __builtin_strlen(s1). */
11158 if (p2 && *p2 == '\0')
11160 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11162 /* If the replacement _DECL isn't initialized, don't do the
11163 transformation. */
11164 if (!fn)
11165 return NULL_TREE;
11167 return build_call_expr_loc (loc, fn, 1, s1);
11169 return NULL_TREE;
11173 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11174 produced. False otherwise. This is done so that we don't output the error
11175 or warning twice or three times. */
11177 bool
11178 fold_builtin_next_arg (tree exp, bool va_start_p)
11180 tree fntype = TREE_TYPE (current_function_decl);
11181 int nargs = call_expr_nargs (exp);
11182 tree arg;
11183 /* There is good chance the current input_location points inside the
11184 definition of the va_start macro (perhaps on the token for
11185 builtin) in a system header, so warnings will not be emitted.
11186 Use the location in real source code. */
11187 source_location current_location =
11188 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11189 NULL);
11191 if (!stdarg_p (fntype))
11193 error ("%<va_start%> used in function with fixed args");
11194 return true;
11197 if (va_start_p)
11199 if (va_start_p && (nargs != 2))
11201 error ("wrong number of arguments to function %<va_start%>");
11202 return true;
11204 arg = CALL_EXPR_ARG (exp, 1);
11206 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11207 when we checked the arguments and if needed issued a warning. */
11208 else
11210 if (nargs == 0)
11212 /* Evidently an out of date version of <stdarg.h>; can't validate
11213 va_start's second argument, but can still work as intended. */
11214 warning_at (current_location,
11215 OPT_Wvarargs,
11216 "%<__builtin_next_arg%> called without an argument");
11217 return true;
11219 else if (nargs > 1)
11221 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11222 return true;
11224 arg = CALL_EXPR_ARG (exp, 0);
11227 if (TREE_CODE (arg) == SSA_NAME)
11228 arg = SSA_NAME_VAR (arg);
11230 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11231 or __builtin_next_arg (0) the first time we see it, after checking
11232 the arguments and if needed issuing a warning. */
11233 if (!integer_zerop (arg))
11235 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11237 /* Strip off all nops for the sake of the comparison. This
11238 is not quite the same as STRIP_NOPS. It does more.
11239 We must also strip off INDIRECT_EXPR for C++ reference
11240 parameters. */
11241 while (CONVERT_EXPR_P (arg)
11242 || TREE_CODE (arg) == INDIRECT_REF)
11243 arg = TREE_OPERAND (arg, 0);
11244 if (arg != last_parm)
11246 /* FIXME: Sometimes with the tree optimizers we can get the
11247 not the last argument even though the user used the last
11248 argument. We just warn and set the arg to be the last
11249 argument so that we will get wrong-code because of
11250 it. */
11251 warning_at (current_location,
11252 OPT_Wvarargs,
11253 "second parameter of %<va_start%> not last named argument");
11256 /* Undefined by C99 7.15.1.4p4 (va_start):
11257 "If the parameter parmN is declared with the register storage
11258 class, with a function or array type, or with a type that is
11259 not compatible with the type that results after application of
11260 the default argument promotions, the behavior is undefined."
11262 else if (DECL_REGISTER (arg))
11264 warning_at (current_location,
11265 OPT_Wvarargs,
11266 "undefined behaviour when second parameter of "
11267 "%<va_start%> is declared with %<register%> storage");
11270 /* We want to verify the second parameter just once before the tree
11271 optimizers are run and then avoid keeping it in the tree,
11272 as otherwise we could warn even for correct code like:
11273 void foo (int i, ...)
11274 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11275 if (va_start_p)
11276 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11277 else
11278 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11280 return false;
11284 /* Expand a call EXP to __builtin_object_size. */
11286 static rtx
11287 expand_builtin_object_size (tree exp)
11289 tree ost;
11290 int object_size_type;
11291 tree fndecl = get_callee_fndecl (exp);
11293 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11295 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11296 exp, fndecl);
11297 expand_builtin_trap ();
11298 return const0_rtx;
11301 ost = CALL_EXPR_ARG (exp, 1);
11302 STRIP_NOPS (ost);
11304 if (TREE_CODE (ost) != INTEGER_CST
11305 || tree_int_cst_sgn (ost) < 0
11306 || compare_tree_int (ost, 3) > 0)
11308 error ("%Klast argument of %D is not integer constant between 0 and 3",
11309 exp, fndecl);
11310 expand_builtin_trap ();
11311 return const0_rtx;
11314 object_size_type = tree_to_shwi (ost);
11316 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11319 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11320 FCODE is the BUILT_IN_* to use.
11321 Return NULL_RTX if we failed; the caller should emit a normal call,
11322 otherwise try to get the result in TARGET, if convenient (and in
11323 mode MODE if that's convenient). */
11325 static rtx
11326 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11327 enum built_in_function fcode)
11329 tree dest, src, len, size;
11331 if (!validate_arglist (exp,
11332 POINTER_TYPE,
11333 fcode == BUILT_IN_MEMSET_CHK
11334 ? INTEGER_TYPE : POINTER_TYPE,
11335 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11336 return NULL_RTX;
11338 dest = CALL_EXPR_ARG (exp, 0);
11339 src = CALL_EXPR_ARG (exp, 1);
11340 len = CALL_EXPR_ARG (exp, 2);
11341 size = CALL_EXPR_ARG (exp, 3);
11343 if (! tree_fits_uhwi_p (size))
11344 return NULL_RTX;
11346 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11348 tree fn;
11350 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11352 warning_at (tree_nonartificial_location (exp),
11353 0, "%Kcall to %D will always overflow destination buffer",
11354 exp, get_callee_fndecl (exp));
11355 return NULL_RTX;
11358 fn = NULL_TREE;
11359 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11360 mem{cpy,pcpy,move,set} is available. */
11361 switch (fcode)
11363 case BUILT_IN_MEMCPY_CHK:
11364 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11365 break;
11366 case BUILT_IN_MEMPCPY_CHK:
11367 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11368 break;
11369 case BUILT_IN_MEMMOVE_CHK:
11370 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11371 break;
11372 case BUILT_IN_MEMSET_CHK:
11373 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11374 break;
11375 default:
11376 break;
11379 if (! fn)
11380 return NULL_RTX;
11382 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11383 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11384 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11385 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11387 else if (fcode == BUILT_IN_MEMSET_CHK)
11388 return NULL_RTX;
11389 else
11391 unsigned int dest_align = get_pointer_alignment (dest);
11393 /* If DEST is not a pointer type, call the normal function. */
11394 if (dest_align == 0)
11395 return NULL_RTX;
11397 /* If SRC and DEST are the same (and not volatile), do nothing. */
11398 if (operand_equal_p (src, dest, 0))
11400 tree expr;
11402 if (fcode != BUILT_IN_MEMPCPY_CHK)
11404 /* Evaluate and ignore LEN in case it has side-effects. */
11405 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11406 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11409 expr = fold_build_pointer_plus (dest, len);
11410 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11413 /* __memmove_chk special case. */
11414 if (fcode == BUILT_IN_MEMMOVE_CHK)
11416 unsigned int src_align = get_pointer_alignment (src);
11418 if (src_align == 0)
11419 return NULL_RTX;
11421 /* If src is categorized for a readonly section we can use
11422 normal __memcpy_chk. */
11423 if (readonly_data_expr (src))
11425 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11426 if (!fn)
11427 return NULL_RTX;
11428 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11429 dest, src, len, size);
11430 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11431 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11432 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11435 return NULL_RTX;
11439 /* Emit warning if a buffer overflow is detected at compile time. */
11441 static void
11442 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11444 int is_strlen = 0;
11445 tree len, size;
11446 location_t loc = tree_nonartificial_location (exp);
11448 switch (fcode)
11450 case BUILT_IN_STRCPY_CHK:
11451 case BUILT_IN_STPCPY_CHK:
11452 /* For __strcat_chk the warning will be emitted only if overflowing
11453 by at least strlen (dest) + 1 bytes. */
11454 case BUILT_IN_STRCAT_CHK:
11455 len = CALL_EXPR_ARG (exp, 1);
11456 size = CALL_EXPR_ARG (exp, 2);
11457 is_strlen = 1;
11458 break;
11459 case BUILT_IN_STRNCAT_CHK:
11460 case BUILT_IN_STRNCPY_CHK:
11461 case BUILT_IN_STPNCPY_CHK:
11462 len = CALL_EXPR_ARG (exp, 2);
11463 size = CALL_EXPR_ARG (exp, 3);
11464 break;
11465 case BUILT_IN_SNPRINTF_CHK:
11466 case BUILT_IN_VSNPRINTF_CHK:
11467 len = CALL_EXPR_ARG (exp, 1);
11468 size = CALL_EXPR_ARG (exp, 3);
11469 break;
11470 default:
11471 gcc_unreachable ();
11474 if (!len || !size)
11475 return;
11477 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11478 return;
11480 if (is_strlen)
11482 len = c_strlen (len, 1);
11483 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11484 return;
11486 else if (fcode == BUILT_IN_STRNCAT_CHK)
11488 tree src = CALL_EXPR_ARG (exp, 1);
11489 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11490 return;
11491 src = c_strlen (src, 1);
11492 if (! src || ! tree_fits_uhwi_p (src))
11494 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11495 exp, get_callee_fndecl (exp));
11496 return;
11498 else if (tree_int_cst_lt (src, size))
11499 return;
11501 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11502 return;
11504 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11505 exp, get_callee_fndecl (exp));
11508 /* Emit warning if a buffer overflow is detected at compile time
11509 in __sprintf_chk/__vsprintf_chk calls. */
11511 static void
11512 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11514 tree size, len, fmt;
11515 const char *fmt_str;
11516 int nargs = call_expr_nargs (exp);
11518 /* Verify the required arguments in the original call. */
11520 if (nargs < 4)
11521 return;
11522 size = CALL_EXPR_ARG (exp, 2);
11523 fmt = CALL_EXPR_ARG (exp, 3);
11525 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11526 return;
11528 /* Check whether the format is a literal string constant. */
11529 fmt_str = c_getstr (fmt);
11530 if (fmt_str == NULL)
11531 return;
11533 if (!init_target_chars ())
11534 return;
11536 /* If the format doesn't contain % args or %%, we know its size. */
11537 if (strchr (fmt_str, target_percent) == 0)
11538 len = build_int_cstu (size_type_node, strlen (fmt_str));
11539 /* If the format is "%s" and first ... argument is a string literal,
11540 we know it too. */
11541 else if (fcode == BUILT_IN_SPRINTF_CHK
11542 && strcmp (fmt_str, target_percent_s) == 0)
11544 tree arg;
11546 if (nargs < 5)
11547 return;
11548 arg = CALL_EXPR_ARG (exp, 4);
11549 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11550 return;
11552 len = c_strlen (arg, 1);
11553 if (!len || ! tree_fits_uhwi_p (len))
11554 return;
11556 else
11557 return;
11559 if (! tree_int_cst_lt (len, size))
11560 warning_at (tree_nonartificial_location (exp),
11561 0, "%Kcall to %D will always overflow destination buffer",
11562 exp, get_callee_fndecl (exp));
11565 /* Emit warning if a free is called with address of a variable. */
11567 static void
11568 maybe_emit_free_warning (tree exp)
11570 tree arg = CALL_EXPR_ARG (exp, 0);
11572 STRIP_NOPS (arg);
11573 if (TREE_CODE (arg) != ADDR_EXPR)
11574 return;
11576 arg = get_base_address (TREE_OPERAND (arg, 0));
11577 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11578 return;
11580 if (SSA_VAR_P (arg))
11581 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11582 "%Kattempt to free a non-heap object %qD", exp, arg);
11583 else
11584 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11585 "%Kattempt to free a non-heap object", exp);
11588 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11589 if possible. */
11591 static tree
11592 fold_builtin_object_size (tree ptr, tree ost)
11594 unsigned HOST_WIDE_INT bytes;
11595 int object_size_type;
11597 if (!validate_arg (ptr, POINTER_TYPE)
11598 || !validate_arg (ost, INTEGER_TYPE))
11599 return NULL_TREE;
11601 STRIP_NOPS (ost);
11603 if (TREE_CODE (ost) != INTEGER_CST
11604 || tree_int_cst_sgn (ost) < 0
11605 || compare_tree_int (ost, 3) > 0)
11606 return NULL_TREE;
11608 object_size_type = tree_to_shwi (ost);
11610 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11611 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11612 and (size_t) 0 for types 2 and 3. */
11613 if (TREE_SIDE_EFFECTS (ptr))
11614 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11616 if (TREE_CODE (ptr) == ADDR_EXPR)
11618 bytes = compute_builtin_object_size (ptr, object_size_type);
11619 if (wi::fits_to_tree_p (bytes, size_type_node))
11620 return build_int_cstu (size_type_node, bytes);
11622 else if (TREE_CODE (ptr) == SSA_NAME)
11624 /* If object size is not known yet, delay folding until
11625 later. Maybe subsequent passes will help determining
11626 it. */
11627 bytes = compute_builtin_object_size (ptr, object_size_type);
11628 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11629 && wi::fits_to_tree_p (bytes, size_type_node))
11630 return build_int_cstu (size_type_node, bytes);
11633 return NULL_TREE;
11636 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11637 LEN, and SIZE. */
11639 static tree
11640 fold_builtin_strncat_chk (location_t loc, tree fndecl,
11641 tree dest, tree src, tree len, tree size)
11643 tree fn;
11644 const char *p;
11646 if (!validate_arg (dest, POINTER_TYPE)
11647 || !validate_arg (src, POINTER_TYPE)
11648 || !validate_arg (size, INTEGER_TYPE)
11649 || !validate_arg (size, INTEGER_TYPE))
11650 return NULL_TREE;
11652 p = c_getstr (src);
11653 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
11654 if (p && *p == '\0')
11655 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11656 else if (integer_zerop (len))
11657 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11659 if (! tree_fits_uhwi_p (size))
11660 return NULL_TREE;
11662 if (! integer_all_onesp (size))
11664 tree src_len = c_strlen (src, 1);
11665 if (src_len
11666 && tree_fits_uhwi_p (src_len)
11667 && tree_fits_uhwi_p (len)
11668 && ! tree_int_cst_lt (len, src_len))
11670 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
11671 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
11672 if (!fn)
11673 return NULL_TREE;
11675 return build_call_expr_loc (loc, fn, 3, dest, src, size);
11677 return NULL_TREE;
11680 /* If __builtin_strncat_chk is used, assume strncat is available. */
11681 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
11682 if (!fn)
11683 return NULL_TREE;
11685 return build_call_expr_loc (loc, fn, 3, dest, src, len);
11688 /* Builtins with folding operations that operate on "..." arguments
11689 need special handling; we need to store the arguments in a convenient
11690 data structure before attempting any folding. Fortunately there are
11691 only a few builtins that fall into this category. FNDECL is the
11692 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11693 result of the function call is ignored. */
11695 static tree
11696 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11697 bool ignore ATTRIBUTE_UNUSED)
11699 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11700 tree ret = NULL_TREE;
11702 switch (fcode)
11704 case BUILT_IN_FPCLASSIFY:
11705 ret = fold_builtin_fpclassify (loc, exp);
11706 break;
11708 default:
11709 break;
11711 if (ret)
11713 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11714 SET_EXPR_LOCATION (ret, loc);
11715 TREE_NO_WARNING (ret) = 1;
11716 return ret;
11718 return NULL_TREE;
11721 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
11722 FMT and ARG are the arguments to the call; we don't fold cases with
11723 more than 2 arguments, and ARG may be null if this is a 1-argument case.
11725 Return NULL_TREE if no simplification was possible, otherwise return the
11726 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11727 code of the function to be simplified. */
11729 static tree
11730 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
11731 tree arg, bool ignore,
11732 enum built_in_function fcode)
11734 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
11735 const char *fmt_str = NULL;
11737 /* If the return value is used, don't do the transformation. */
11738 if (! ignore)
11739 return NULL_TREE;
11741 /* Verify the required arguments in the original call. */
11742 if (!validate_arg (fmt, POINTER_TYPE))
11743 return NULL_TREE;
11745 /* Check whether the format is a literal string constant. */
11746 fmt_str = c_getstr (fmt);
11747 if (fmt_str == NULL)
11748 return NULL_TREE;
11750 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
11752 /* If we're using an unlocked function, assume the other
11753 unlocked functions exist explicitly. */
11754 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
11755 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
11757 else
11759 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
11760 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
11763 if (!init_target_chars ())
11764 return NULL_TREE;
11766 if (strcmp (fmt_str, target_percent_s) == 0
11767 || strchr (fmt_str, target_percent) == NULL)
11769 const char *str;
11771 if (strcmp (fmt_str, target_percent_s) == 0)
11773 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11774 return NULL_TREE;
11776 if (!arg || !validate_arg (arg, POINTER_TYPE))
11777 return NULL_TREE;
11779 str = c_getstr (arg);
11780 if (str == NULL)
11781 return NULL_TREE;
11783 else
11785 /* The format specifier doesn't contain any '%' characters. */
11786 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
11787 && arg)
11788 return NULL_TREE;
11789 str = fmt_str;
11792 /* If the string was "", printf does nothing. */
11793 if (str[0] == '\0')
11794 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11796 /* If the string has length of 1, call putchar. */
11797 if (str[1] == '\0')
11799 /* Given printf("c"), (where c is any one character,)
11800 convert "c"[0] to an int and pass that to the replacement
11801 function. */
11802 newarg = build_int_cst (integer_type_node, str[0]);
11803 if (fn_putchar)
11804 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
11806 else
11808 /* If the string was "string\n", call puts("string"). */
11809 size_t len = strlen (str);
11810 if ((unsigned char)str[len - 1] == target_newline
11811 && (size_t) (int) len == len
11812 && (int) len > 0)
11814 char *newstr;
11815 tree offset_node, string_cst;
11817 /* Create a NUL-terminated string that's one char shorter
11818 than the original, stripping off the trailing '\n'. */
11819 newarg = build_string_literal (len, str);
11820 string_cst = string_constant (newarg, &offset_node);
11821 gcc_checking_assert (string_cst
11822 && (TREE_STRING_LENGTH (string_cst)
11823 == (int) len)
11824 && integer_zerop (offset_node)
11825 && (unsigned char)
11826 TREE_STRING_POINTER (string_cst)[len - 1]
11827 == target_newline);
11828 /* build_string_literal creates a new STRING_CST,
11829 modify it in place to avoid double copying. */
11830 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
11831 newstr[len - 1] = '\0';
11832 if (fn_puts)
11833 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
11835 else
11836 /* We'd like to arrange to call fputs(string,stdout) here,
11837 but we need stdout and don't have a way to get it yet. */
11838 return NULL_TREE;
11842 /* The other optimizations can be done only on the non-va_list variants. */
11843 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11844 return NULL_TREE;
11846 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
11847 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
11849 if (!arg || !validate_arg (arg, POINTER_TYPE))
11850 return NULL_TREE;
11851 if (fn_puts)
11852 call = build_call_expr_loc (loc, fn_puts, 1, arg);
11855 /* If the format specifier was "%c", call __builtin_putchar(arg). */
11856 else if (strcmp (fmt_str, target_percent_c) == 0)
11858 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11859 return NULL_TREE;
11860 if (fn_putchar)
11861 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
11864 if (!call)
11865 return NULL_TREE;
11867 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
11870 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
11871 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
11872 more than 3 arguments, and ARG may be null in the 2-argument case.
11874 Return NULL_TREE if no simplification was possible, otherwise return the
11875 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11876 code of the function to be simplified. */
11878 static tree
11879 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
11880 tree fmt, tree arg, bool ignore,
11881 enum built_in_function fcode)
11883 tree fn_fputc, fn_fputs, call = NULL_TREE;
11884 const char *fmt_str = NULL;
11886 /* If the return value is used, don't do the transformation. */
11887 if (! ignore)
11888 return NULL_TREE;
11890 /* Verify the required arguments in the original call. */
11891 if (!validate_arg (fp, POINTER_TYPE))
11892 return NULL_TREE;
11893 if (!validate_arg (fmt, POINTER_TYPE))
11894 return NULL_TREE;
11896 /* Check whether the format is a literal string constant. */
11897 fmt_str = c_getstr (fmt);
11898 if (fmt_str == NULL)
11899 return NULL_TREE;
11901 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
11903 /* If we're using an unlocked function, assume the other
11904 unlocked functions exist explicitly. */
11905 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
11906 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
11908 else
11910 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
11911 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
11914 if (!init_target_chars ())
11915 return NULL_TREE;
11917 /* If the format doesn't contain % args or %%, use strcpy. */
11918 if (strchr (fmt_str, target_percent) == NULL)
11920 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
11921 && arg)
11922 return NULL_TREE;
11924 /* If the format specifier was "", fprintf does nothing. */
11925 if (fmt_str[0] == '\0')
11927 /* If FP has side-effects, just wait until gimplification is
11928 done. */
11929 if (TREE_SIDE_EFFECTS (fp))
11930 return NULL_TREE;
11932 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11935 /* When "string" doesn't contain %, replace all cases of
11936 fprintf (fp, string) with fputs (string, fp). The fputs
11937 builtin will take care of special cases like length == 1. */
11938 if (fn_fputs)
11939 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
11942 /* The other optimizations can be done only on the non-va_list variants. */
11943 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
11944 return NULL_TREE;
11946 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
11947 else if (strcmp (fmt_str, target_percent_s) == 0)
11949 if (!arg || !validate_arg (arg, POINTER_TYPE))
11950 return NULL_TREE;
11951 if (fn_fputs)
11952 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
11955 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
11956 else if (strcmp (fmt_str, target_percent_c) == 0)
11958 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11959 return NULL_TREE;
11960 if (fn_fputc)
11961 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
11964 if (!call)
11965 return NULL_TREE;
11966 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
11969 /* Initialize format string characters in the target charset. */
11971 bool
11972 init_target_chars (void)
11974 static bool init;
11975 if (!init)
11977 target_newline = lang_hooks.to_target_charset ('\n');
11978 target_percent = lang_hooks.to_target_charset ('%');
11979 target_c = lang_hooks.to_target_charset ('c');
11980 target_s = lang_hooks.to_target_charset ('s');
11981 if (target_newline == 0 || target_percent == 0 || target_c == 0
11982 || target_s == 0)
11983 return false;
11985 target_percent_c[0] = target_percent;
11986 target_percent_c[1] = target_c;
11987 target_percent_c[2] = '\0';
11989 target_percent_s[0] = target_percent;
11990 target_percent_s[1] = target_s;
11991 target_percent_s[2] = '\0';
11993 target_percent_s_newline[0] = target_percent;
11994 target_percent_s_newline[1] = target_s;
11995 target_percent_s_newline[2] = target_newline;
11996 target_percent_s_newline[3] = '\0';
11998 init = true;
12000 return true;
12003 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12004 and no overflow/underflow occurred. INEXACT is true if M was not
12005 exactly calculated. TYPE is the tree type for the result. This
12006 function assumes that you cleared the MPFR flags and then
12007 calculated M to see if anything subsequently set a flag prior to
12008 entering this function. Return NULL_TREE if any checks fail. */
12010 static tree
12011 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12013 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12014 overflow/underflow occurred. If -frounding-math, proceed iff the
12015 result of calling FUNC was exact. */
12016 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12017 && (!flag_rounding_math || !inexact))
12019 REAL_VALUE_TYPE rr;
12021 real_from_mpfr (&rr, m, type, GMP_RNDN);
12022 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12023 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12024 but the mpft_t is not, then we underflowed in the
12025 conversion. */
12026 if (real_isfinite (&rr)
12027 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12029 REAL_VALUE_TYPE rmode;
12031 real_convert (&rmode, TYPE_MODE (type), &rr);
12032 /* Proceed iff the specified mode can hold the value. */
12033 if (real_identical (&rmode, &rr))
12034 return build_real (type, rmode);
12037 return NULL_TREE;
12040 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12041 number and no overflow/underflow occurred. INEXACT is true if M
12042 was not exactly calculated. TYPE is the tree type for the result.
12043 This function assumes that you cleared the MPFR flags and then
12044 calculated M to see if anything subsequently set a flag prior to
12045 entering this function. Return NULL_TREE if any checks fail, if
12046 FORCE_CONVERT is true, then bypass the checks. */
12048 static tree
12049 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12051 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12052 overflow/underflow occurred. If -frounding-math, proceed iff the
12053 result of calling FUNC was exact. */
12054 if (force_convert
12055 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12056 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12057 && (!flag_rounding_math || !inexact)))
12059 REAL_VALUE_TYPE re, im;
12061 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12062 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12063 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12064 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12065 but the mpft_t is not, then we underflowed in the
12066 conversion. */
12067 if (force_convert
12068 || (real_isfinite (&re) && real_isfinite (&im)
12069 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12070 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12072 REAL_VALUE_TYPE re_mode, im_mode;
12074 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12075 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12076 /* Proceed iff the specified mode can hold the value. */
12077 if (force_convert
12078 || (real_identical (&re_mode, &re)
12079 && real_identical (&im_mode, &im)))
12080 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12081 build_real (TREE_TYPE (type), im_mode));
12084 return NULL_TREE;
12087 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12088 FUNC on it and return the resulting value as a tree with type TYPE.
12089 If MIN and/or MAX are not NULL, then the supplied ARG must be
12090 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12091 acceptable values, otherwise they are not. The mpfr precision is
12092 set to the precision of TYPE. We assume that function FUNC returns
12093 zero if the result could be calculated exactly within the requested
12094 precision. */
12096 static tree
12097 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12098 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12099 bool inclusive)
12101 tree result = NULL_TREE;
12103 STRIP_NOPS (arg);
12105 /* To proceed, MPFR must exactly represent the target floating point
12106 format, which only happens when the target base equals two. */
12107 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12108 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12110 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12112 if (real_isfinite (ra)
12113 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12114 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12116 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12117 const int prec = fmt->p;
12118 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12119 int inexact;
12120 mpfr_t m;
12122 mpfr_init2 (m, prec);
12123 mpfr_from_real (m, ra, GMP_RNDN);
12124 mpfr_clear_flags ();
12125 inexact = func (m, m, rnd);
12126 result = do_mpfr_ckconv (m, type, inexact);
12127 mpfr_clear (m);
12131 return result;
12134 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12135 FUNC on it and return the resulting value as a tree with type TYPE.
12136 The mpfr precision is set to the precision of TYPE. We assume that
12137 function FUNC returns zero if the result could be calculated
12138 exactly within the requested precision. */
12140 static tree
12141 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12142 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12144 tree result = NULL_TREE;
12146 STRIP_NOPS (arg1);
12147 STRIP_NOPS (arg2);
12149 /* To proceed, MPFR must exactly represent the target floating point
12150 format, which only happens when the target base equals two. */
12151 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12152 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12153 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12155 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12156 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12158 if (real_isfinite (ra1) && real_isfinite (ra2))
12160 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12161 const int prec = fmt->p;
12162 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12163 int inexact;
12164 mpfr_t m1, m2;
12166 mpfr_inits2 (prec, m1, m2, NULL);
12167 mpfr_from_real (m1, ra1, GMP_RNDN);
12168 mpfr_from_real (m2, ra2, GMP_RNDN);
12169 mpfr_clear_flags ();
12170 inexact = func (m1, m1, m2, rnd);
12171 result = do_mpfr_ckconv (m1, type, inexact);
12172 mpfr_clears (m1, m2, NULL);
12176 return result;
12179 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12180 FUNC on it and return the resulting value as a tree with type TYPE.
12181 The mpfr precision is set to the precision of TYPE. We assume that
12182 function FUNC returns zero if the result could be calculated
12183 exactly within the requested precision. */
12185 static tree
12186 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12187 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12189 tree result = NULL_TREE;
12191 STRIP_NOPS (arg1);
12192 STRIP_NOPS (arg2);
12193 STRIP_NOPS (arg3);
12195 /* To proceed, MPFR must exactly represent the target floating point
12196 format, which only happens when the target base equals two. */
12197 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12198 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12199 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12200 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12202 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12203 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12204 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12206 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12208 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12209 const int prec = fmt->p;
12210 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12211 int inexact;
12212 mpfr_t m1, m2, m3;
12214 mpfr_inits2 (prec, m1, m2, m3, NULL);
12215 mpfr_from_real (m1, ra1, GMP_RNDN);
12216 mpfr_from_real (m2, ra2, GMP_RNDN);
12217 mpfr_from_real (m3, ra3, GMP_RNDN);
12218 mpfr_clear_flags ();
12219 inexact = func (m1, m1, m2, m3, rnd);
12220 result = do_mpfr_ckconv (m1, type, inexact);
12221 mpfr_clears (m1, m2, m3, NULL);
12225 return result;
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* A single inexact flag is returned covering both results;
	     it is passed to both conversions below.  */
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  /* Only fold if BOTH values converted back exactly.  */
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && tree_fits_shwi_p (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_to_shwi (arg1);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      /* The order must round-trip through "long" (FUNC takes a long),
	 the real argument must be finite, and when MIN is non-NULL it
	 bounds the valid domain from below, inclusively iff INCLUSIVE.  */
      if (n == (long)n
	  && real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* M is used in place as both input and output.  */
	  inexact = func (m, n, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* M0 receives the remainder in place; the quotient bits go
	     to INTEGER_QUO.  */
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* M is used in place; SG receives the sign of gamma(ARG).  */
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }
  return result;
}
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
	{
	  /* The precision comes from the component type of the complex
	     result TYPE.  */
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref (m), re, rnd);
	  mpfr_from_real (mpc_imagref (m), im, rnd);
	  mpfr_clear_flags ();
	  /* M is used in place as both input and output.  */
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      /* The finiteness requirement is waived when DO_NONFINITE.  */
      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  /* The precision comes from the component type of the complex
	     result TYPE.  */
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  /* M0 is used in place as both first operand and output.  */
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
12588 /* A wrapper function for builtin folding that prevents warnings for
12589 "statement without effect" and the like, caused by removing the
12590 call node earlier than the warning is generated. */
12592 tree
12593 fold_call_stmt (gimple stmt, bool ignore)
12595 tree ret = NULL_TREE;
12596 tree fndecl = gimple_call_fndecl (stmt);
12597 location_t loc = gimple_location (stmt);
12598 if (fndecl
12599 && TREE_CODE (fndecl) == FUNCTION_DECL
12600 && DECL_BUILT_IN (fndecl)
12601 && !gimple_call_va_arg_pack_p (stmt))
12603 int nargs = gimple_call_num_args (stmt);
12604 tree *args = (nargs > 0
12605 ? gimple_call_arg_ptr (stmt, 0)
12606 : &error_mark_node);
12608 if (avoid_folding_inline_builtin (fndecl))
12609 return NULL_TREE;
12610 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12612 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12614 else
12616 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
12617 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
12618 if (ret)
12620 /* Propagate location information from original call to
12621 expansion of builtin. Otherwise things like
12622 maybe_emit_chk_warning, that operate on the expansion
12623 of a builtin, will use the wrong location information. */
12624 if (gimple_has_location (stmt))
12626 tree realret = ret;
12627 if (TREE_CODE (ret) == NOP_EXPR)
12628 realret = TREE_OPERAND (ret, 0);
12629 if (CAN_HAVE_LOCATION_P (realret)
12630 && !EXPR_HAS_LOCATION (realret))
12631 SET_EXPR_LOCATION (realret, loc);
12632 return realret;
12634 return ret;
12638 return NULL_TREE;
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  /* Rename the canonical builtin declaration itself.  */
  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  /* A few builtins are also reachable through libfunc entry points
     and block-operation expanders; keep those names in sync too.  */
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      /* ffs is only routed through a libfunc/optab when the target int
	 is narrower than a word.  */
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
12687 /* Return true if DECL is a builtin that expands to a constant or similarly
12688 simple code. */
12689 bool
12690 is_simple_builtin (tree decl)
12692 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12693 switch (DECL_FUNCTION_CODE (decl))
12695 /* Builtins that expand to constants. */
12696 case BUILT_IN_CONSTANT_P:
12697 case BUILT_IN_EXPECT:
12698 case BUILT_IN_OBJECT_SIZE:
12699 case BUILT_IN_UNREACHABLE:
12700 /* Simple register moves or loads from stack. */
12701 case BUILT_IN_ASSUME_ALIGNED:
12702 case BUILT_IN_RETURN_ADDRESS:
12703 case BUILT_IN_EXTRACT_RETURN_ADDR:
12704 case BUILT_IN_FROB_RETURN_ADDR:
12705 case BUILT_IN_RETURN:
12706 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12707 case BUILT_IN_FRAME_ADDRESS:
12708 case BUILT_IN_VA_END:
12709 case BUILT_IN_STACK_SAVE:
12710 case BUILT_IN_STACK_RESTORE:
12711 /* Exception state returns or moves registers around. */
12712 case BUILT_IN_EH_FILTER:
12713 case BUILT_IN_EH_POINTER:
12714 case BUILT_IN_EH_COPY_VALUES:
12715 return true;
12717 default:
12718 return false;
12721 return false;
12724 /* Return true if DECL is a builtin that is not expensive, i.e., they are
12725 most probably expanded inline into reasonably simple code. This is a
12726 superset of is_simple_builtin. */
12727 bool
12728 is_inexpensive_builtin (tree decl)
12730 if (!decl)
12731 return false;
12732 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12733 return true;
12734 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12735 switch (DECL_FUNCTION_CODE (decl))
12737 case BUILT_IN_ABS:
12738 case BUILT_IN_ALLOCA:
12739 case BUILT_IN_ALLOCA_WITH_ALIGN:
12740 case BUILT_IN_BSWAP16:
12741 case BUILT_IN_BSWAP32:
12742 case BUILT_IN_BSWAP64:
12743 case BUILT_IN_CLZ:
12744 case BUILT_IN_CLZIMAX:
12745 case BUILT_IN_CLZL:
12746 case BUILT_IN_CLZLL:
12747 case BUILT_IN_CTZ:
12748 case BUILT_IN_CTZIMAX:
12749 case BUILT_IN_CTZL:
12750 case BUILT_IN_CTZLL:
12751 case BUILT_IN_FFS:
12752 case BUILT_IN_FFSIMAX:
12753 case BUILT_IN_FFSL:
12754 case BUILT_IN_FFSLL:
12755 case BUILT_IN_IMAXABS:
12756 case BUILT_IN_FINITE:
12757 case BUILT_IN_FINITEF:
12758 case BUILT_IN_FINITEL:
12759 case BUILT_IN_FINITED32:
12760 case BUILT_IN_FINITED64:
12761 case BUILT_IN_FINITED128:
12762 case BUILT_IN_FPCLASSIFY:
12763 case BUILT_IN_ISFINITE:
12764 case BUILT_IN_ISINF_SIGN:
12765 case BUILT_IN_ISINF:
12766 case BUILT_IN_ISINFF:
12767 case BUILT_IN_ISINFL:
12768 case BUILT_IN_ISINFD32:
12769 case BUILT_IN_ISINFD64:
12770 case BUILT_IN_ISINFD128:
12771 case BUILT_IN_ISNAN:
12772 case BUILT_IN_ISNANF:
12773 case BUILT_IN_ISNANL:
12774 case BUILT_IN_ISNAND32:
12775 case BUILT_IN_ISNAND64:
12776 case BUILT_IN_ISNAND128:
12777 case BUILT_IN_ISNORMAL:
12778 case BUILT_IN_ISGREATER:
12779 case BUILT_IN_ISGREATEREQUAL:
12780 case BUILT_IN_ISLESS:
12781 case BUILT_IN_ISLESSEQUAL:
12782 case BUILT_IN_ISLESSGREATER:
12783 case BUILT_IN_ISUNORDERED:
12784 case BUILT_IN_VA_ARG_PACK:
12785 case BUILT_IN_VA_ARG_PACK_LEN:
12786 case BUILT_IN_VA_COPY:
12787 case BUILT_IN_TRAP:
12788 case BUILT_IN_SAVEREGS:
12789 case BUILT_IN_POPCOUNTL:
12790 case BUILT_IN_POPCOUNTLL:
12791 case BUILT_IN_POPCOUNTIMAX:
12792 case BUILT_IN_POPCOUNT:
12793 case BUILT_IN_PARITYL:
12794 case BUILT_IN_PARITYLL:
12795 case BUILT_IN_PARITYIMAX:
12796 case BUILT_IN_PARITY:
12797 case BUILT_IN_LABS:
12798 case BUILT_IN_LLABS:
12799 case BUILT_IN_PREFETCH:
12800 case BUILT_IN_ACC_ON_DEVICE:
12801 return true;
12803 default:
12804 return is_simple_builtin (decl);
12807 return false;