/* Expand builtin functions.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "ipa-ref.h"
#include "lto-streamer.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"

static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
				  enum built_in_function);

static unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_
   (or, with -fcilkplus, names a Cilk runtime entry point).  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
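
/* A few illustrative cases (a sketch, not from the original sources;
   assumes -fcilkplus is off):

     is_builtin_name ("__builtin_memcpy")  -> true
     is_builtin_name ("__atomic_load_n")   -> true
     is_builtin_name ("memcpy")            -> false

   Note that "memcpy" may still be expanded as a built-in; this predicate
   only recognizes the reserved "internal" spellings.  */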

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
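
/* Worked example (illustrative, not from the original sources): if
   get_object_alignment_1 determines align == 64 and bitpos == 16, the
   address is known to be 16 bits past a 64-bit boundary, so the largest
   power of two dividing it is bitpos & -bitpos == 16 bits, i.e. only
   2-byte alignment can be guaranteed.  */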

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
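
/* Worked example (illustrative; assumes BIGGEST_ALIGNMENT == 128): for a
   constant pointer such as (void *) 0x1004, the INTEGER_CST case above
   yields *bitposp == (0x1004 * 8) & 127 == 32, so get_pointer_alignment
   reduces it to 32 & -32 == 32 bits, i.e. 4-byte alignment, which indeed
   divides 0x1004.  */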

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
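
/* Illustrative behavior (a sketch, not from the original sources):

     "hello" with constant offset 2        -> ssize_int (3)
     "foo\0bar" with a non-constant offset -> NULL_TREE (the embedded NUL
					      makes the length depend on
					      the unknown offset)
     "hello" with constant offset 42       -> NULL_TREE, after warning
					      about the out-of-bounds
					      offset.  */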

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
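
/* Worked example (illustrative, assuming 8-bit units): on a little-endian
   target, c_readstr ("abcd", SImode) packs 'a' into bits 0-7, 'b' into
   bits 8-15, and so on, yielding the constant 0x64636261; a big-endian
   target gets 0x61626364.  Once a NUL is read, CH stays zero, so any
   remaining bytes of the mode are zero-filled.  */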

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
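
/* Source-level view (illustrative): __builtin_return_address (0) needs no
   frame walk, while __builtin_frame_address (2), for example, follows the
   dynamic chain twice starting from the hard frame pointer:

     void *caller_pc = __builtin_return_address (0);
     void *outer_fp  = __builtin_frame_address (2);  */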

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
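
/* Resulting buffer layout (an illustrative summary of the stores above,
   in Pmode words):

     buf[0]      frame pointer (targetm.builtin_setjmp_frame_value)
     buf[1]      address of RECEIVER_LABEL
     buf[2]...   stack save area in SA_MODE

   which is why __builtin_setjmp buffers are documented as five words.  */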

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

/* Return true if the argument iterator ITER has more arguments to visit.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
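
/* Typical use (mirroring calls elsewhere in this file): check for
   exactly two pointer arguments with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   or for one pointer followed by anything with

     validate_arglist (exp, POINTER_TYPE, 0)  */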

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
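
/* Source-level view (illustrative): the three operands validated above
   correspond to

     __builtin_prefetch (p);        /\* rw = 0, locality = 3 *\/
     __builtin_prefetch (p, 1, 0);  /\* write hint, no temporal locality *\/

   where the second and third arguments must be literal constants.  */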

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
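
/* Worked example of the rounding step above (illustrative): with
   size == 4 and a register mode requiring 8-byte alignment,
   CEIL (4, 8) * 8 == 8, so a 4-byte pad is inserted before that
   register's slot in the block.  */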

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1578 /* Perform an untyped call and save the state required to perform an
1579 untyped return of whatever value was returned by the given function. */
1581 static rtx
1582 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1584 int size, align, regno;
1585 machine_mode mode;
1586 rtx incoming_args, result, reg, dest, src;
1587 rtx_call_insn *call_insn;
1588 rtx old_stack_level = 0;
1589 rtx call_fusage = 0;
1590 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1592 arguments = convert_memory_address (Pmode, arguments);
1594 /* Create a block where the return registers can be saved. */
1595 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1597 /* Fetch the arg pointer from the ARGUMENTS block. */
1598 incoming_args = gen_reg_rtx (Pmode);
1599 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1600 #ifndef STACK_GROWS_DOWNWARD
1601 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1602 incoming_args, 0, OPTAB_LIB_WIDEN);
1603 #endif
1605 /* Push a new argument block and copy the arguments. Do not allow
1606 the (potential) memcpy call below to interfere with our stack
1607 manipulations. */
1608 do_pending_stack_adjust ();
1609 NO_DEFER_POP;
1611 /* Save the stack with nonlocal if available. */
1612 #ifdef HAVE_save_stack_nonlocal
1613 if (HAVE_save_stack_nonlocal)
1614 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1615 else
1616 #endif
1617 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1619 /* Allocate a block of memory onto the stack and copy the memory
1620 arguments to the outgoing arguments address. We can pass TRUE
1621 as the 4th argument because we just saved the stack pointer
1622 and will restore it right after the call. */
1623 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1625 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1626 may have already set current_function_calls_alloca to true.
1627 current_function_calls_alloca won't be set if argsize is zero,
1628 so we have to guarantee need_drap is true here. */
1629 if (SUPPORTS_STACK_ALIGNMENT)
1630 crtl->need_drap = true;
1632 dest = virtual_outgoing_args_rtx;
1633 #ifndef STACK_GROWS_DOWNWARD
1634 if (CONST_INT_P (argsize))
1635 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1636 else
1637 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1638 #endif
1639 dest = gen_rtx_MEM (BLKmode, dest);
1640 set_mem_align (dest, PARM_BOUNDARY);
1641 src = gen_rtx_MEM (BLKmode, incoming_args);
1642 set_mem_align (src, PARM_BOUNDARY);
1643 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1645 /* Refer to the argument block. */
1646 apply_args_size ();
1647 arguments = gen_rtx_MEM (BLKmode, arguments);
1648 set_mem_align (arguments, PARM_BOUNDARY);
1650 /* Walk past the arg-pointer and structure value address. */
1651 size = GET_MODE_SIZE (Pmode);
1652 if (struct_value)
1653 size += GET_MODE_SIZE (Pmode);
1655 /* Restore each of the registers previously saved. Make USE insns
1656 for each of these registers for use in making the call. */
1657 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1658 if ((mode = apply_args_mode[regno]) != VOIDmode)
1660 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1661 if (size % align != 0)
1662 size = CEIL (size, align) * align;
1663 reg = gen_rtx_REG (mode, regno);
1664 emit_move_insn (reg, adjust_address (arguments, mode, size));
1665 use_reg (&call_fusage, reg);
1666 size += GET_MODE_SIZE (mode);
1669 /* Restore the structure value address unless this is passed as an
1670 "invisible" first argument. */
1671 size = GET_MODE_SIZE (Pmode);
1672 if (struct_value)
1674 rtx value = gen_reg_rtx (Pmode);
1675 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1676 emit_move_insn (struct_value, value);
1677 if (REG_P (struct_value))
1678 use_reg (&call_fusage, struct_value);
1679 size += GET_MODE_SIZE (Pmode);
1682 /* All arguments and registers used for the call are set up by now! */
1683 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1685 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1686 more is needed; nor do we want to load it into a register as an
1687 optimization, because prepare_call_address already did that when appropriate. */
1688 if (GET_CODE (function) != SYMBOL_REF)
1689 function = memory_address (FUNCTION_MODE, function);
1691 /* Generate the actual call instruction and save the return value. */
1692 #ifdef HAVE_untyped_call
1693 if (HAVE_untyped_call)
1694 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1695 result, result_vector (1, result)));
1696 else
1697 #endif
1698 #ifdef HAVE_call_value
1699 if (HAVE_call_value)
1701 rtx valreg = 0;
1703 /* Locate the unique return register. It is not possible to
1704 express a call that sets more than one return register using
1705 call_value; use untyped_call for that. In fact, untyped_call
1706 only needs to save the return registers in the given block. */
1707 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1708 if ((mode = apply_result_mode[regno]) != VOIDmode)
1710 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1712 valreg = gen_rtx_REG (mode, regno);
1715 emit_call_insn (GEN_CALL_VALUE (valreg,
1716 gen_rtx_MEM (FUNCTION_MODE, function),
1717 const0_rtx, NULL_RTX, const0_rtx));
1719 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1721 else
1722 #endif
1723 gcc_unreachable ();
1725 /* Find the CALL insn we just emitted, and attach the register usage
1726 information. */
1727 call_insn = last_call_insn ();
1728 add_function_usage_to (call_insn, call_fusage);
1730 /* Restore the stack. */
1731 #ifdef HAVE_save_stack_nonlocal
1732 if (HAVE_save_stack_nonlocal)
1733 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1734 else
1735 #endif
1736 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1737 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1739 OK_DEFER_POP;
1741 /* Return the address of the result block. */
1742 result = copy_addr_to_reg (XEXP (result, 0));
1743 return convert_memory_address (ptr_mode, result);
1746 /* Perform an untyped return. */
1748 static void
1749 expand_builtin_return (rtx result)
1751 int size, align, regno;
1752 machine_mode mode;
1753 rtx reg;
1754 rtx_insn *call_fusage = 0;
1756 result = convert_memory_address (Pmode, result);
1758 apply_result_size ();
1759 result = gen_rtx_MEM (BLKmode, result);
1761 #ifdef HAVE_untyped_return
1762 if (HAVE_untyped_return)
1764 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1765 emit_barrier ();
1766 return;
1768 #endif
1770 /* Restore the return value and note that each value is used. */
1771 size = 0;
1772 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1773 if ((mode = apply_result_mode[regno]) != VOIDmode)
1775 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1776 if (size % align != 0)
1777 size = CEIL (size, align) * align;
1778 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1779 emit_move_insn (reg, adjust_address (result, mode, size));
1781 push_to_sequence (call_fusage);
1782 emit_use (reg);
1783 call_fusage = get_insns ();
1784 end_sequence ();
1785 size += GET_MODE_SIZE (mode);
1788 /* Put the USE insns before the return. */
1789 emit_insn (call_fusage);
1791 /* Return whatever values were restored by jumping directly to the end
1792 of the function. */
1793 expand_naked_return ();
1796 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1798 static enum type_class
1799 type_to_class (tree type)
1801 switch (TREE_CODE (type))
1803 case VOID_TYPE: return void_type_class;
1804 case INTEGER_TYPE: return integer_type_class;
1805 case ENUMERAL_TYPE: return enumeral_type_class;
1806 case BOOLEAN_TYPE: return boolean_type_class;
1807 case POINTER_TYPE: return pointer_type_class;
1808 case REFERENCE_TYPE: return reference_type_class;
1809 case OFFSET_TYPE: return offset_type_class;
1810 case REAL_TYPE: return real_type_class;
1811 case COMPLEX_TYPE: return complex_type_class;
1812 case FUNCTION_TYPE: return function_type_class;
1813 case METHOD_TYPE: return method_type_class;
1814 case RECORD_TYPE: return record_type_class;
1815 case UNION_TYPE:
1816 case QUAL_UNION_TYPE: return union_type_class;
1817 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1818 ? string_type_class : array_type_class);
1819 case LANG_TYPE: return lang_type_class;
1820 default: return no_type_class;
1824 /* Expand a call EXP to __builtin_classify_type. */
1826 static rtx
1827 expand_builtin_classify_type (tree exp)
1829 if (call_expr_nargs (exp))
1830 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1831 return GEN_INT (no_type_class);
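/* For instance (illustrative): __builtin_classify_type (3.14) expands to
   the constant real_type_class, while a call with no arguments yields
   no_type_class. */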
1834 /* This helper macro, meant to be used in mathfn_built_in below,
1835 determines which among a set of three builtin math functions is
1836 appropriate for a given type mode. The `F' and `L' cases are
1837 automatically generated from the `double' case. */
1838 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1839 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1840 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1841 fcodel = BUILT_IN_MATHFN##L ; break;
1842 /* Similar to above, but appends _R after any F/L suffix. */
1843 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1844 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1845 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1846 fcodel = BUILT_IN_MATHFN##L_R ; break;
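/* For instance, CASE_MATHFN (BUILT_IN_SQRT) expands to

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   so all three floating-point variants map to one (fcode, fcodef, fcodel)
   triple. */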
1848 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1849 if available. If IMPLICIT is true use the implicit builtin declaration,
1850 otherwise use the explicit declaration. If we can't do the conversion,
1851 return zero. */
1853 static tree
1854 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1856 enum built_in_function fcode, fcodef, fcodel, fcode2;
1858 switch (fn)
1860 CASE_MATHFN (BUILT_IN_ACOS)
1861 CASE_MATHFN (BUILT_IN_ACOSH)
1862 CASE_MATHFN (BUILT_IN_ASIN)
1863 CASE_MATHFN (BUILT_IN_ASINH)
1864 CASE_MATHFN (BUILT_IN_ATAN)
1865 CASE_MATHFN (BUILT_IN_ATAN2)
1866 CASE_MATHFN (BUILT_IN_ATANH)
1867 CASE_MATHFN (BUILT_IN_CBRT)
1868 CASE_MATHFN (BUILT_IN_CEIL)
1869 CASE_MATHFN (BUILT_IN_CEXPI)
1870 CASE_MATHFN (BUILT_IN_COPYSIGN)
1871 CASE_MATHFN (BUILT_IN_COS)
1872 CASE_MATHFN (BUILT_IN_COSH)
1873 CASE_MATHFN (BUILT_IN_DREM)
1874 CASE_MATHFN (BUILT_IN_ERF)
1875 CASE_MATHFN (BUILT_IN_ERFC)
1876 CASE_MATHFN (BUILT_IN_EXP)
1877 CASE_MATHFN (BUILT_IN_EXP10)
1878 CASE_MATHFN (BUILT_IN_EXP2)
1879 CASE_MATHFN (BUILT_IN_EXPM1)
1880 CASE_MATHFN (BUILT_IN_FABS)
1881 CASE_MATHFN (BUILT_IN_FDIM)
1882 CASE_MATHFN (BUILT_IN_FLOOR)
1883 CASE_MATHFN (BUILT_IN_FMA)
1884 CASE_MATHFN (BUILT_IN_FMAX)
1885 CASE_MATHFN (BUILT_IN_FMIN)
1886 CASE_MATHFN (BUILT_IN_FMOD)
1887 CASE_MATHFN (BUILT_IN_FREXP)
1888 CASE_MATHFN (BUILT_IN_GAMMA)
1889 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1890 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1891 CASE_MATHFN (BUILT_IN_HYPOT)
1892 CASE_MATHFN (BUILT_IN_ILOGB)
1893 CASE_MATHFN (BUILT_IN_ICEIL)
1894 CASE_MATHFN (BUILT_IN_IFLOOR)
1895 CASE_MATHFN (BUILT_IN_INF)
1896 CASE_MATHFN (BUILT_IN_IRINT)
1897 CASE_MATHFN (BUILT_IN_IROUND)
1898 CASE_MATHFN (BUILT_IN_ISINF)
1899 CASE_MATHFN (BUILT_IN_J0)
1900 CASE_MATHFN (BUILT_IN_J1)
1901 CASE_MATHFN (BUILT_IN_JN)
1902 CASE_MATHFN (BUILT_IN_LCEIL)
1903 CASE_MATHFN (BUILT_IN_LDEXP)
1904 CASE_MATHFN (BUILT_IN_LFLOOR)
1905 CASE_MATHFN (BUILT_IN_LGAMMA)
1906 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1907 CASE_MATHFN (BUILT_IN_LLCEIL)
1908 CASE_MATHFN (BUILT_IN_LLFLOOR)
1909 CASE_MATHFN (BUILT_IN_LLRINT)
1910 CASE_MATHFN (BUILT_IN_LLROUND)
1911 CASE_MATHFN (BUILT_IN_LOG)
1912 CASE_MATHFN (BUILT_IN_LOG10)
1913 CASE_MATHFN (BUILT_IN_LOG1P)
1914 CASE_MATHFN (BUILT_IN_LOG2)
1915 CASE_MATHFN (BUILT_IN_LOGB)
1916 CASE_MATHFN (BUILT_IN_LRINT)
1917 CASE_MATHFN (BUILT_IN_LROUND)
1918 CASE_MATHFN (BUILT_IN_MODF)
1919 CASE_MATHFN (BUILT_IN_NAN)
1920 CASE_MATHFN (BUILT_IN_NANS)
1921 CASE_MATHFN (BUILT_IN_NEARBYINT)
1922 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1923 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1924 CASE_MATHFN (BUILT_IN_POW)
1925 CASE_MATHFN (BUILT_IN_POWI)
1926 CASE_MATHFN (BUILT_IN_POW10)
1927 CASE_MATHFN (BUILT_IN_REMAINDER)
1928 CASE_MATHFN (BUILT_IN_REMQUO)
1929 CASE_MATHFN (BUILT_IN_RINT)
1930 CASE_MATHFN (BUILT_IN_ROUND)
1931 CASE_MATHFN (BUILT_IN_SCALB)
1932 CASE_MATHFN (BUILT_IN_SCALBLN)
1933 CASE_MATHFN (BUILT_IN_SCALBN)
1934 CASE_MATHFN (BUILT_IN_SIGNBIT)
1935 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1936 CASE_MATHFN (BUILT_IN_SIN)
1937 CASE_MATHFN (BUILT_IN_SINCOS)
1938 CASE_MATHFN (BUILT_IN_SINH)
1939 CASE_MATHFN (BUILT_IN_SQRT)
1940 CASE_MATHFN (BUILT_IN_TAN)
1941 CASE_MATHFN (BUILT_IN_TANH)
1942 CASE_MATHFN (BUILT_IN_TGAMMA)
1943 CASE_MATHFN (BUILT_IN_TRUNC)
1944 CASE_MATHFN (BUILT_IN_Y0)
1945 CASE_MATHFN (BUILT_IN_Y1)
1946 CASE_MATHFN (BUILT_IN_YN)
1948 default:
1949 return NULL_TREE;
1952 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1953 fcode2 = fcode;
1954 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1955 fcode2 = fcodef;
1956 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1957 fcode2 = fcodel;
1958 else
1959 return NULL_TREE;
1961 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1962 return NULL_TREE;
1964 return builtin_decl_explicit (fcode2);
1967 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1969 tree
1970 mathfn_built_in (tree type, enum built_in_function fn)
1972 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
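/* E.g. (illustrative) mathfn_built_in (float_type_node, BUILT_IN_SQRT)
   yields the declaration of sqrtf, provided its implicit builtin
   declaration is available. */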
1975 /* If errno must be maintained, expand the RTL to check if the result,
1976 TARGET, of a built-in function call, EXP, is NaN, and if so set
1977 errno to EDOM. */
1979 static void
1980 expand_errno_check (tree exp, rtx target)
1982 rtx_code_label *lab = gen_label_rtx ();
1984 /* Test the result; if it is NaN, set errno=EDOM because
1985 the argument was not in the domain. */
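/* Comparing TARGET with itself is the usual IEEE NaN test: the EQ branch
   is taken for every non-NaN value, so only a NaN result falls through
   into the errno-setting code. */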
1986 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1987 NULL_RTX, NULL_RTX, lab,
1988 /* The jump is very likely. */
1989 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1991 #ifdef TARGET_EDOM
1992 /* If this built-in doesn't throw an exception, set errno directly. */
1993 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1995 #ifdef GEN_ERRNO_RTX
1996 rtx errno_rtx = GEN_ERRNO_RTX;
1997 #else
1998 rtx errno_rtx
1999 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2000 #endif
2001 emit_move_insn (errno_rtx,
2002 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2003 emit_label (lab);
2004 return;
2006 #endif
2008 /* Make sure the library call isn't expanded as a tail call. */
2009 CALL_EXPR_TAILCALL (exp) = 0;
2011 /* We can't set errno=EDOM directly; let the library call do it.
2012 Pop the arguments right away in case the call gets deleted. */
2013 NO_DEFER_POP;
2014 expand_call (exp, target, 0);
2015 OK_DEFER_POP;
2016 emit_label (lab);
2019 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2020 Return NULL_RTX if a normal call should be emitted rather than expanding
2021 the function in-line. EXP is the expression that is a call to the builtin
2022 function; if convenient, the result should be placed in TARGET.
2023 SUBTARGET may be used as the target for computing one of EXP's operands. */
2025 static rtx
2026 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2028 optab builtin_optab;
2029 rtx op0;
2030 rtx_insn *insns;
2031 tree fndecl = get_callee_fndecl (exp);
2032 machine_mode mode;
2033 bool errno_set = false;
2034 bool try_widening = false;
2035 tree arg;
2037 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2038 return NULL_RTX;
2040 arg = CALL_EXPR_ARG (exp, 0);
2042 switch (DECL_FUNCTION_CODE (fndecl))
2044 CASE_FLT_FN (BUILT_IN_SQRT):
2045 errno_set = ! tree_expr_nonnegative_p (arg);
2046 try_widening = true;
2047 builtin_optab = sqrt_optab;
2048 break;
2049 CASE_FLT_FN (BUILT_IN_EXP):
2050 errno_set = true; builtin_optab = exp_optab; break;
2051 CASE_FLT_FN (BUILT_IN_EXP10):
2052 CASE_FLT_FN (BUILT_IN_POW10):
2053 errno_set = true; builtin_optab = exp10_optab; break;
2054 CASE_FLT_FN (BUILT_IN_EXP2):
2055 errno_set = true; builtin_optab = exp2_optab; break;
2056 CASE_FLT_FN (BUILT_IN_EXPM1):
2057 errno_set = true; builtin_optab = expm1_optab; break;
2058 CASE_FLT_FN (BUILT_IN_LOGB):
2059 errno_set = true; builtin_optab = logb_optab; break;
2060 CASE_FLT_FN (BUILT_IN_LOG):
2061 errno_set = true; builtin_optab = log_optab; break;
2062 CASE_FLT_FN (BUILT_IN_LOG10):
2063 errno_set = true; builtin_optab = log10_optab; break;
2064 CASE_FLT_FN (BUILT_IN_LOG2):
2065 errno_set = true; builtin_optab = log2_optab; break;
2066 CASE_FLT_FN (BUILT_IN_LOG1P):
2067 errno_set = true; builtin_optab = log1p_optab; break;
2068 CASE_FLT_FN (BUILT_IN_ASIN):
2069 builtin_optab = asin_optab; break;
2070 CASE_FLT_FN (BUILT_IN_ACOS):
2071 builtin_optab = acos_optab; break;
2072 CASE_FLT_FN (BUILT_IN_TAN):
2073 builtin_optab = tan_optab; break;
2074 CASE_FLT_FN (BUILT_IN_ATAN):
2075 builtin_optab = atan_optab; break;
2076 CASE_FLT_FN (BUILT_IN_FLOOR):
2077 builtin_optab = floor_optab; break;
2078 CASE_FLT_FN (BUILT_IN_CEIL):
2079 builtin_optab = ceil_optab; break;
2080 CASE_FLT_FN (BUILT_IN_TRUNC):
2081 builtin_optab = btrunc_optab; break;
2082 CASE_FLT_FN (BUILT_IN_ROUND):
2083 builtin_optab = round_optab; break;
2084 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2085 builtin_optab = nearbyint_optab;
2086 if (flag_trapping_math)
2087 break;
2088 /* Else fall through and expand as rint. */
2089 CASE_FLT_FN (BUILT_IN_RINT):
2090 builtin_optab = rint_optab; break;
2091 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2092 builtin_optab = significand_optab; break;
2093 default:
2094 gcc_unreachable ();
2097 /* Make a suitable register to place result in. */
2098 mode = TYPE_MODE (TREE_TYPE (exp));
2100 if (! flag_errno_math || ! HONOR_NANS (mode))
2101 errno_set = false;
2103 /* Before working hard, check whether the instruction is available, but try
2104 to widen the mode for specific operations. */
2105 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2106 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2107 && (!errno_set || !optimize_insn_for_size_p ()))
2109 rtx result = gen_reg_rtx (mode);
2111 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2112 need to expand the argument again. This way, we will not perform
2113 side-effects more than once. */
2114 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2116 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2118 start_sequence ();
2120 /* Compute into RESULT.
2121 Set RESULT to wherever the result comes back. */
2122 result = expand_unop (mode, builtin_optab, op0, result, 0);
2124 if (result != 0)
2126 if (errno_set)
2127 expand_errno_check (exp, result);
2129 /* Output the entire sequence. */
2130 insns = get_insns ();
2131 end_sequence ();
2132 emit_insn (insns);
2133 return result;
2136 /* If we were unable to expand via the builtin, stop the sequence
2137 (without outputting the insns) and call to the library function
2138 with the stabilized argument list. */
2139 end_sequence ();
2142 return expand_call (exp, target, target == const0_rtx);
2145 /* Expand a call to the builtin binary math functions (pow and atan2).
2146 Return NULL_RTX if a normal call should be emitted rather than expanding the
2147 function in-line. EXP is the expression that is a call to the builtin
2148 function; if convenient, the result should be placed in TARGET.
2149 SUBTARGET may be used as the target for computing one of EXP's
2150 operands. */
2152 static rtx
2153 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2155 optab builtin_optab;
2156 rtx op0, op1, result;
2157 rtx_insn *insns;
2158 int op1_type = REAL_TYPE;
2159 tree fndecl = get_callee_fndecl (exp);
2160 tree arg0, arg1;
2161 machine_mode mode;
2162 bool errno_set = true;
2164 switch (DECL_FUNCTION_CODE (fndecl))
2166 CASE_FLT_FN (BUILT_IN_SCALBN):
2167 CASE_FLT_FN (BUILT_IN_SCALBLN):
2168 CASE_FLT_FN (BUILT_IN_LDEXP):
2169 op1_type = INTEGER_TYPE;
2170 default:
2171 break;
2174 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2175 return NULL_RTX;
2177 arg0 = CALL_EXPR_ARG (exp, 0);
2178 arg1 = CALL_EXPR_ARG (exp, 1);
2180 switch (DECL_FUNCTION_CODE (fndecl))
2182 CASE_FLT_FN (BUILT_IN_POW):
2183 builtin_optab = pow_optab; break;
2184 CASE_FLT_FN (BUILT_IN_ATAN2):
2185 builtin_optab = atan2_optab; break;
2186 CASE_FLT_FN (BUILT_IN_SCALB):
2187 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2188 return 0;
2189 builtin_optab = scalb_optab; break;
2190 CASE_FLT_FN (BUILT_IN_SCALBN):
2191 CASE_FLT_FN (BUILT_IN_SCALBLN):
2192 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2193 return 0;
2194 /* Fall through... */
2195 CASE_FLT_FN (BUILT_IN_LDEXP):
2196 builtin_optab = ldexp_optab; break;
2197 CASE_FLT_FN (BUILT_IN_FMOD):
2198 builtin_optab = fmod_optab; break;
2199 CASE_FLT_FN (BUILT_IN_REMAINDER):
2200 CASE_FLT_FN (BUILT_IN_DREM):
2201 builtin_optab = remainder_optab; break;
2202 default:
2203 gcc_unreachable ();
2206 /* Make a suitable register to place result in. */
2207 mode = TYPE_MODE (TREE_TYPE (exp));
2209 /* Before working hard, check whether the instruction is available. */
2210 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2211 return NULL_RTX;
2213 result = gen_reg_rtx (mode);
2215 if (! flag_errno_math || ! HONOR_NANS (mode))
2216 errno_set = false;
2218 if (errno_set && optimize_insn_for_size_p ())
2219 return 0;
2221 /* Always stabilize the argument list. */
2222 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2223 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2225 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2226 op1 = expand_normal (arg1);
2228 start_sequence ();
2230 /* Compute into RESULT.
2231 Set RESULT to wherever the result comes back. */
2232 result = expand_binop (mode, builtin_optab, op0, op1,
2233 result, 0, OPTAB_DIRECT);
2235 /* If we were unable to expand via the builtin, stop the sequence
2236 (without outputting the insns) and call to the library function
2237 with the stabilized argument list. */
2238 if (result == 0)
2240 end_sequence ();
2241 return expand_call (exp, target, target == const0_rtx);
2244 if (errno_set)
2245 expand_errno_check (exp, result);
2247 /* Output the entire sequence. */
2248 insns = get_insns ();
2249 end_sequence ();
2250 emit_insn (insns);
2252 return result;
2255 /* Expand a call to the builtin trinary math functions (fma).
2256 Return NULL_RTX if a normal call should be emitted rather than expanding the
2257 function in-line. EXP is the expression that is a call to the builtin
2258 function; if convenient, the result should be placed in TARGET.
2259 SUBTARGET may be used as the target for computing one of EXP's
2260 operands. */
2262 static rtx
2263 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2265 optab builtin_optab;
2266 rtx op0, op1, op2, result;
2267 rtx_insn *insns;
2268 tree fndecl = get_callee_fndecl (exp);
2269 tree arg0, arg1, arg2;
2270 machine_mode mode;
2272 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2273 return NULL_RTX;
2275 arg0 = CALL_EXPR_ARG (exp, 0);
2276 arg1 = CALL_EXPR_ARG (exp, 1);
2277 arg2 = CALL_EXPR_ARG (exp, 2);
2279 switch (DECL_FUNCTION_CODE (fndecl))
2281 CASE_FLT_FN (BUILT_IN_FMA):
2282 builtin_optab = fma_optab; break;
2283 default:
2284 gcc_unreachable ();
2287 /* Make a suitable register to place result in. */
2288 mode = TYPE_MODE (TREE_TYPE (exp));
2290 /* Before working hard, check whether the instruction is available. */
2291 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2292 return NULL_RTX;
2294 result = gen_reg_rtx (mode);
2296 /* Always stabilize the argument list. */
2297 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2298 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2299 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2301 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2302 op1 = expand_normal (arg1);
2303 op2 = expand_normal (arg2);
2305 start_sequence ();
2307 /* Compute into RESULT.
2308 Set RESULT to wherever the result comes back. */
2309 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2310 result, 0);
2312 /* If we were unable to expand via the builtin, stop the sequence
2313 (without outputting the insns) and call to the library function
2314 with the stabilized argument list. */
2315 if (result == 0)
2317 end_sequence ();
2318 return expand_call (exp, target, target == const0_rtx);
2321 /* Output the entire sequence. */
2322 insns = get_insns ();
2323 end_sequence ();
2324 emit_insn (insns);
2326 return result;
2329 /* Expand a call to the builtin sin and cos math functions.
2330 Return NULL_RTX if a normal call should be emitted rather than expanding the
2331 function in-line. EXP is the expression that is a call to the builtin
2332 function; if convenient, the result should be placed in TARGET.
2333 SUBTARGET may be used as the target for computing one of EXP's
2334 operands. */
2336 static rtx
2337 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2339 optab builtin_optab;
2340 rtx op0;
2341 rtx_insn *insns;
2342 tree fndecl = get_callee_fndecl (exp);
2343 machine_mode mode;
2344 tree arg;
2346 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2347 return NULL_RTX;
2349 arg = CALL_EXPR_ARG (exp, 0);
2351 switch (DECL_FUNCTION_CODE (fndecl))
2353 CASE_FLT_FN (BUILT_IN_SIN):
2354 CASE_FLT_FN (BUILT_IN_COS):
2355 builtin_optab = sincos_optab; break;
2356 default:
2357 gcc_unreachable ();
2360 /* Make a suitable register to place result in. */
2361 mode = TYPE_MODE (TREE_TYPE (exp));
2363 /* Check if the sincos insn is available; otherwise fall back
2364 to the sin or cos insn. */
2365 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2366 switch (DECL_FUNCTION_CODE (fndecl))
2368 CASE_FLT_FN (BUILT_IN_SIN):
2369 builtin_optab = sin_optab; break;
2370 CASE_FLT_FN (BUILT_IN_COS):
2371 builtin_optab = cos_optab; break;
2372 default:
2373 gcc_unreachable ();
2376 /* Before working hard, check whether the instruction is available. */
2377 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2379 rtx result = gen_reg_rtx (mode);
2381 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2382 need to expand the argument again. This way, we will not perform
2383 side-effects more than once. */
2384 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2386 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2388 start_sequence ();
2390 /* Compute into RESULT.
2391 Set RESULT to wherever the result comes back. */
2392 if (builtin_optab == sincos_optab)
2394 int ok;
2396 switch (DECL_FUNCTION_CODE (fndecl))
2398 CASE_FLT_FN (BUILT_IN_SIN):
2399 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2400 break;
2401 CASE_FLT_FN (BUILT_IN_COS):
2402 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2403 break;
2404 default:
2405 gcc_unreachable ();
2407 gcc_assert (ok);
2409 else
2410 result = expand_unop (mode, builtin_optab, op0, result, 0);
2412 if (result != 0)
2414 /* Output the entire sequence. */
2415 insns = get_insns ();
2416 end_sequence ();
2417 emit_insn (insns);
2418 return result;
2421 /* If we were unable to expand via the builtin, stop the sequence
2422 (without outputting the insns) and call to the library function
2423 with the stabilized argument list. */
2424 end_sequence ();
2427 return expand_call (exp, target, target == const0_rtx);
2430 /* Given an interclass math builtin decl FNDECL and its argument ARG
2431 return an RTL instruction code that implements the functionality.
2432 If that isn't possible or available return CODE_FOR_nothing. */
2434 static enum insn_code
2435 interclass_mathfn_icode (tree arg, tree fndecl)
2437 bool errno_set = false;
2438 optab builtin_optab = unknown_optab;
2439 machine_mode mode;
2441 switch (DECL_FUNCTION_CODE (fndecl))
2443 CASE_FLT_FN (BUILT_IN_ILOGB):
2444 errno_set = true; builtin_optab = ilogb_optab; break;
2445 CASE_FLT_FN (BUILT_IN_ISINF):
2446 builtin_optab = isinf_optab; break;
2447 case BUILT_IN_ISNORMAL:
2448 case BUILT_IN_ISFINITE:
2449 CASE_FLT_FN (BUILT_IN_FINITE):
2450 case BUILT_IN_FINITED32:
2451 case BUILT_IN_FINITED64:
2452 case BUILT_IN_FINITED128:
2453 case BUILT_IN_ISINFD32:
2454 case BUILT_IN_ISINFD64:
2455 case BUILT_IN_ISINFD128:
2456 /* These builtins have no optabs (yet). */
2457 break;
2458 default:
2459 gcc_unreachable ();
2462 /* There's no easy way to detect the case we need to set EDOM. */
2463 if (flag_errno_math && errno_set)
2464 return CODE_FOR_nothing;
2466 /* Optab mode depends on the mode of the input argument. */
2467 mode = TYPE_MODE (TREE_TYPE (arg));
2469 if (builtin_optab)
2470 return optab_handler (builtin_optab, mode);
2471 return CODE_FOR_nothing;
2474 /* Expand a call to one of the builtin math functions that operate on
2475 a floating-point argument and output an integer result (ilogb, isinf,
2476 isnan, etc).
2477 Return 0 if a normal call should be emitted rather than expanding the
2478 function in-line. EXP is the expression that is a call to the builtin
2479 function; if convenient, the result should be placed in TARGET. */
2481 static rtx
2482 expand_builtin_interclass_mathfn (tree exp, rtx target)
2484 enum insn_code icode = CODE_FOR_nothing;
2485 rtx op0;
2486 tree fndecl = get_callee_fndecl (exp);
2487 machine_mode mode;
2488 tree arg;
2490 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2491 return NULL_RTX;
2493 arg = CALL_EXPR_ARG (exp, 0);
2494 icode = interclass_mathfn_icode (arg, fndecl);
2495 mode = TYPE_MODE (TREE_TYPE (arg));
2497 if (icode != CODE_FOR_nothing)
2499 struct expand_operand ops[1];
2500 rtx_insn *last = get_last_insn ();
2501 tree orig_arg = arg;
2503 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2504 need to expand the argument again. This way, we will not perform
2505 side-effects more than once. */
2506 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2508 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2510 if (mode != GET_MODE (op0))
2511 op0 = convert_to_mode (mode, op0, 0);
2513 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2514 if (maybe_legitimize_operands (icode, 0, 1, ops)
2515 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2516 return ops[0].value;
2518 delete_insns_since (last);
2519 CALL_EXPR_ARG (exp, 0) = orig_arg;
2522 return NULL_RTX;
2525 /* Expand a call to the builtin sincos math function.
2526 Return NULL_RTX if a normal call should be emitted rather than expanding the
2527 function in-line. EXP is the expression that is a call to the builtin
2528 function. */
2530 static rtx
2531 expand_builtin_sincos (tree exp)
2533 rtx op0, op1, op2, target1, target2;
2534 machine_mode mode;
2535 tree arg, sinp, cosp;
2536 int result;
2537 location_t loc = EXPR_LOCATION (exp);
2538 tree alias_type, alias_off;
2540 if (!validate_arglist (exp, REAL_TYPE,
2541 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2542 return NULL_RTX;
2544 arg = CALL_EXPR_ARG (exp, 0);
2545 sinp = CALL_EXPR_ARG (exp, 1);
2546 cosp = CALL_EXPR_ARG (exp, 2);
2548 /* Make a suitable register to place result in. */
2549 mode = TYPE_MODE (TREE_TYPE (arg));
2551 /* Check if the sincos insn is available; otherwise emit the call. */
2552 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2553 return NULL_RTX;
2555 target1 = gen_reg_rtx (mode);
2556 target2 = gen_reg_rtx (mode);
2558 op0 = expand_normal (arg);
2559 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2560 alias_off = build_int_cst (alias_type, 0);
2561 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2562 sinp, alias_off));
2563 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2564 cosp, alias_off));
2566 /* Compute into target1 and target2.
2567 Set TARGET to wherever the result comes back. */
2568 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2569 gcc_assert (result);
2571 /* Move target1 and target2 to the memory locations indicated
2572 by op1 and op2. */
2573 emit_move_insn (op1, target1);
2574 emit_move_insn (op2, target2);
2576 return const0_rtx;
2579 /* Expand a call to the internal cexpi builtin to the sincos math function.
2580 EXP is the expression that is a call to the builtin function; if convenient,
2581 the result should be placed in TARGET. */
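/* Recall that cexpi (x) == exp (i*x) == cos (x) + i*sin (x), which is why
   a single sincos expansion can produce both components at once. */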
2583 static rtx
2584 expand_builtin_cexpi (tree exp, rtx target)
2586 tree fndecl = get_callee_fndecl (exp);
2587 tree arg, type;
2588 machine_mode mode;
2589 rtx op0, op1, op2;
2590 location_t loc = EXPR_LOCATION (exp);
2592 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2593 return NULL_RTX;
2595 arg = CALL_EXPR_ARG (exp, 0);
2596 type = TREE_TYPE (arg);
2597 mode = TYPE_MODE (TREE_TYPE (arg));
2599 /* Try expanding via a sincos optab, fall back to emitting a libcall
2600 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2601 is only generated from sincos or cexp, or when either of them is available. */
2602 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2604 op1 = gen_reg_rtx (mode);
2605 op2 = gen_reg_rtx (mode);
2607 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2609 /* Compute into op1 and op2. */
2610 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2612 else if (targetm.libc_has_function (function_sincos))
2614 tree call, fn = NULL_TREE;
2615 tree top1, top2;
2616 rtx op1a, op2a;
2618 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2619 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2620 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2621 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2622 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2623 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2624 else
2625 gcc_unreachable ();
2627 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2628 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2629 op1a = copy_addr_to_reg (XEXP (op1, 0));
2630 op2a = copy_addr_to_reg (XEXP (op2, 0));
2631 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2632 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2634 /* Make sure not to fold the sincos call again. */
2635 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2636 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2637 call, 3, arg, top1, top2));
2639 else
2641 tree call, fn = NULL_TREE, narg;
2642 tree ctype = build_complex_type (type);
2644 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2645 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2646 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2647 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2648 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2649 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2650 else
2651 gcc_unreachable ();
2653 /* If we don't have a decl for cexp create one. This is the
2654 friendliest fallback if the user calls __builtin_cexpi
2655 when the target lacks full C99 function support. */
2656 if (fn == NULL_TREE)
2658 tree fntype;
2659 const char *name = NULL;
2661 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2662 name = "cexpf";
2663 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2664 name = "cexp";
2665 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2666 name = "cexpl";
2668 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2669 fn = build_fn_decl (name, fntype);
2672 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2673 build_real (type, dconst0), arg);
2675 /* Make sure not to fold the cexp call again. */
2676 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2677 return expand_expr (build_call_nary (ctype, call, 1, narg),
2678 target, VOIDmode, EXPAND_NORMAL);
2681 /* Now build the proper return type. */
2682 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2683 make_tree (TREE_TYPE (arg), op2),
2684 make_tree (TREE_TYPE (arg), op1)),
2685 target, VOIDmode, EXPAND_NORMAL);
2688 /* Conveniently construct a function call expression. FNDECL names the
2689 function to be called, N is the number of arguments, and the "..."
2690 parameters are the argument expressions. Unlike build_call_expr
2691 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2693 static tree
2694 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2696 va_list ap;
2697 tree fntype = TREE_TYPE (fndecl);
2698 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2700 va_start (ap, n);
2701 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2702 va_end (ap);
2703 SET_EXPR_LOCATION (fn, loc);
2704 return fn;
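/* E.g. (illustrative) build_call_nofold_loc (loc, fn, 3, dest, src, len)
   yields the unfolded CALL_EXPR fn (dest, src, len), as used for the
   memcpy-style fallbacks below. */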
2707 /* Expand a call to one of the builtin rounding functions gcc defines
2708 as an extension (lfloor and lceil). As these are gcc extensions we
2709 do not need to worry about setting errno to EDOM.
2710 If expanding via optab fails, lower expression to (int)(floor(x)).
2711 EXP is the expression that is a call to the builtin function;
2712 if convenient, the result should be placed in TARGET. */
2714 static rtx
2715 expand_builtin_int_roundingfn (tree exp, rtx target)
2717 convert_optab builtin_optab;
2718 rtx op0, tmp;
2719 rtx_insn *insns;
2720 tree fndecl = get_callee_fndecl (exp);
2721 enum built_in_function fallback_fn;
2722 tree fallback_fndecl;
2723 machine_mode mode;
2724 tree arg;
2726 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2727 gcc_unreachable ();
2729 arg = CALL_EXPR_ARG (exp, 0);
2731 switch (DECL_FUNCTION_CODE (fndecl))
2733 CASE_FLT_FN (BUILT_IN_ICEIL):
2734 CASE_FLT_FN (BUILT_IN_LCEIL):
2735 CASE_FLT_FN (BUILT_IN_LLCEIL):
2736 builtin_optab = lceil_optab;
2737 fallback_fn = BUILT_IN_CEIL;
2738 break;
2740 CASE_FLT_FN (BUILT_IN_IFLOOR):
2741 CASE_FLT_FN (BUILT_IN_LFLOOR):
2742 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2743 builtin_optab = lfloor_optab;
2744 fallback_fn = BUILT_IN_FLOOR;
2745 break;
2747 default:
2748 gcc_unreachable ();
2751 /* Make a suitable register to place result in. */
2752 mode = TYPE_MODE (TREE_TYPE (exp));
2754 target = gen_reg_rtx (mode);
2756 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2757 need to expand the argument again. This way, we will not perform
2758 side-effects more than once. */
2759 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2761 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2763 start_sequence ();
2765 /* Compute into TARGET. */
2766 if (expand_sfix_optab (target, op0, builtin_optab))
2768 /* Output the entire sequence. */
2769 insns = get_insns ();
2770 end_sequence ();
2771 emit_insn (insns);
2772 return target;
2775 /* If we were unable to expand via the builtin, stop the sequence
2776 (without outputting the insns). */
2777 end_sequence ();
2779 /* Fall back to floating point rounding optab. */
2780 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2782 /* For non-C99 targets we may end up without a fallback fndecl here
2783 if the user called __builtin_lfloor directly. In this case emit
2784 a call to the floor/ceil variants nevertheless. This should give
2785 the best user experience on targets without full C99 support. */
2786 if (fallback_fndecl == NULL_TREE)
2788 tree fntype;
2789 const char *name = NULL;
2791 switch (DECL_FUNCTION_CODE (fndecl))
2793 case BUILT_IN_ICEIL:
2794 case BUILT_IN_LCEIL:
2795 case BUILT_IN_LLCEIL:
2796 name = "ceil";
2797 break;
2798 case BUILT_IN_ICEILF:
2799 case BUILT_IN_LCEILF:
2800 case BUILT_IN_LLCEILF:
2801 name = "ceilf";
2802 break;
2803 case BUILT_IN_ICEILL:
2804 case BUILT_IN_LCEILL:
2805 case BUILT_IN_LLCEILL:
2806 name = "ceill";
2807 break;
2808 case BUILT_IN_IFLOOR:
2809 case BUILT_IN_LFLOOR:
2810 case BUILT_IN_LLFLOOR:
2811 name = "floor";
2812 break;
2813 case BUILT_IN_IFLOORF:
2814 case BUILT_IN_LFLOORF:
2815 case BUILT_IN_LLFLOORF:
2816 name = "floorf";
2817 break;
2818 case BUILT_IN_IFLOORL:
2819 case BUILT_IN_LFLOORL:
2820 case BUILT_IN_LLFLOORL:
2821 name = "floorl";
2822 break;
2823 default:
2824 gcc_unreachable ();
2827 fntype = build_function_type_list (TREE_TYPE (arg),
2828 TREE_TYPE (arg), NULL_TREE);
2829 fallback_fndecl = build_fn_decl (name, fntype);
2832 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2834 tmp = expand_normal (exp);
2835 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2837 /* Truncate the result of floating point optab to integer
2838 via expand_fix (). */
2839 target = gen_reg_rtx (mode);
2840 expand_fix (target, tmp, 0);
2842 return target;
2845 /* Expand a call to one of the builtin math functions doing integer
2846 conversion (lrint).
2847 Return 0 if a normal call should be emitted rather than expanding the
2848 function in-line. EXP is the expression that is a call to the builtin
2849 function; if convenient, the result should be placed in TARGET. */
2851 static rtx
2852 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2854 convert_optab builtin_optab;
2855 rtx op0;
2856 rtx_insn *insns;
2857 tree fndecl = get_callee_fndecl (exp);
2858 tree arg;
2859 machine_mode mode;
2860 enum built_in_function fallback_fn = BUILT_IN_NONE;
2862 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2863 gcc_unreachable ();
2865 arg = CALL_EXPR_ARG (exp, 0);
2867 switch (DECL_FUNCTION_CODE (fndecl))
2869 CASE_FLT_FN (BUILT_IN_IRINT):
2870 fallback_fn = BUILT_IN_LRINT;
2871 /* FALLTHRU */
2872 CASE_FLT_FN (BUILT_IN_LRINT):
2873 CASE_FLT_FN (BUILT_IN_LLRINT):
2874 builtin_optab = lrint_optab;
2875 break;
2877 CASE_FLT_FN (BUILT_IN_IROUND):
2878 fallback_fn = BUILT_IN_LROUND;
2879 /* FALLTHRU */
2880 CASE_FLT_FN (BUILT_IN_LROUND):
2881 CASE_FLT_FN (BUILT_IN_LLROUND):
2882 builtin_optab = lround_optab;
2883 break;
2885 default:
2886 gcc_unreachable ();
2889 /* There's no easy way to detect the case we need to set EDOM. */
2890 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2891 return NULL_RTX;
2893 /* Make a suitable register to place result in. */
2894 mode = TYPE_MODE (TREE_TYPE (exp));
2896 /* There's no easy way to detect the case we need to set EDOM. */
2897 if (!flag_errno_math)
2899 rtx result = gen_reg_rtx (mode);
2901 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2902 need to expand the argument again. This way, we will not perform
2903 side-effects more than once. */
2904 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2906 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2908 start_sequence ();
2910 if (expand_sfix_optab (result, op0, builtin_optab))
2912 /* Output the entire sequence. */
2913 insns = get_insns ();
2914 end_sequence ();
2915 emit_insn (insns);
2916 return result;
2919 /* If we were unable to expand via the builtin, stop the sequence
2920 (without outputting the insns) and call to the library function
2921 with the stabilized argument list. */
2922 end_sequence ();
2925 if (fallback_fn != BUILT_IN_NONE)
2927 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2928 targets, (int) round (x) should never be transformed into
2929 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2930 a call to lround in the hope that the target provides at least some
2931 C99 functions. This should give the best user experience on
2932 targets without full C99 support. */
2933 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2934 fallback_fn, 0);
2936 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2937 fallback_fndecl, 1, arg);
2939 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2940 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2941 return convert_to_mode (mode, target, 0);
2944 return expand_call (exp, target, target == const0_rtx);
2947 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2948 a normal call should be emitted rather than expanding the function
2949 in-line. EXP is the expression that is a call to the builtin
2950 function; if convenient, the result should be placed in TARGET. */
2952 static rtx
2953 expand_builtin_powi (tree exp, rtx target)
2955 tree arg0, arg1;
2956 rtx op0, op1;
2957 machine_mode mode;
2958 machine_mode mode2;
2960 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2961 return NULL_RTX;
2963 arg0 = CALL_EXPR_ARG (exp, 0);
2964 arg1 = CALL_EXPR_ARG (exp, 1);
2965 mode = TYPE_MODE (TREE_TYPE (exp));
2967 /* Emit a libcall to libgcc. */
2969 /* Mode of the 2nd argument must match that of an int. */
2970 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2972 if (target == NULL_RTX)
2973 target = gen_reg_rtx (mode);
2975 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2976 if (GET_MODE (op0) != mode)
2977 op0 = convert_to_mode (mode, op0, 0);
2978 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2979 if (GET_MODE (op1) != mode2)
2980 op1 = convert_to_mode (mode2, op1, 0);
2982 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2983 target, LCT_CONST, mode, 2,
2984 op0, mode, op1, mode2);
2986 return target;
2989 /* Expand expression EXP which is a call to the strlen builtin. Return
2990 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2991 try to get the result in TARGET, if convenient. */
2993 static rtx
2994 expand_builtin_strlen (tree exp, rtx target,
2995 machine_mode target_mode)
2997 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2998 return NULL_RTX;
2999 else
3001 struct expand_operand ops[4];
3002 rtx pat;
3003 tree len;
3004 tree src = CALL_EXPR_ARG (exp, 0);
3005 rtx src_reg;
3006 rtx_insn *before_strlen;
3007 machine_mode insn_mode = target_mode;
3008 enum insn_code icode = CODE_FOR_nothing;
3009 unsigned int align;
3011 /* If the length can be computed at compile-time, return it. */
3012 len = c_strlen (src, 0);
3013 if (len)
3014 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3016 /* If the length can be computed at compile-time and is a constant
3017 integer, but there are side-effects in src, evaluate
3018 src for side-effects, then return len.
3019 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3020 can be optimized into: i++; x = 3; */
3021 len = c_strlen (src, 1);
3022 if (len && TREE_CODE (len) == INTEGER_CST)
3024 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3025 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3028 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3030 /* If SRC is not a pointer type, don't do this operation inline. */
3031 if (align == 0)
3032 return NULL_RTX;
3034 /* Bail out if we can't compute strlen in the right mode. */
3035 while (insn_mode != VOIDmode)
3037 icode = optab_handler (strlen_optab, insn_mode);
3038 if (icode != CODE_FOR_nothing)
3039 break;
3041 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3043 if (insn_mode == VOIDmode)
3044 return NULL_RTX;
3046 /* Make a place to hold the source address. We will not expand
3047 the actual source until we are sure that the expansion will
3048 not fail -- there are trees that cannot be expanded twice. */
3049 src_reg = gen_reg_rtx (Pmode);
3051 /* Mark the beginning of the strlen sequence so we can emit the
3052 source operand later. */
3053 before_strlen = get_last_insn ();
3055 create_output_operand (&ops[0], target, insn_mode);
3056 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3057 create_integer_operand (&ops[2], 0);
3058 create_integer_operand (&ops[3], align);
3059 if (!maybe_expand_insn (icode, 4, ops))
3060 return NULL_RTX;
3062 /* Now that we are assured of success, expand the source. */
3063 start_sequence ();
3064 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3065 if (pat != src_reg)
3067 #ifdef POINTERS_EXTEND_UNSIGNED
3068 if (GET_MODE (pat) != Pmode)
3069 pat = convert_to_mode (Pmode, pat,
3070 POINTERS_EXTEND_UNSIGNED);
3071 #endif
3072 emit_move_insn (src_reg, pat);
3074 pat = get_insns ();
3075 end_sequence ();
3077 if (before_strlen)
3078 emit_insn_after (pat, before_strlen);
3079 else
3080 emit_insn_before (pat, get_insns ());
3082 /* Return the value in the proper mode for this function. */
3083 if (GET_MODE (ops[0].value) == target_mode)
3084 target = ops[0].value;
3085 else if (target != 0)
3086 convert_move (target, ops[0].value, 0);
3087 else
3088 target = convert_to_mode (target_mode, ops[0].value, 0);
3090 return target;
3094 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3095 bytes from constant string DATA + OFFSET and return it as target
3096 constant. */
3098 static rtx
3099 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3100 machine_mode mode)
3102 const char *str = (const char *) data;
3104 gcc_assert (offset >= 0
3105 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3106 <= strlen (str) + 1));
3108 return c_readstr (str + offset, mode);
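/* Illustration: with DATA pointing at "hello" and OFFSET 1, an SImode read
   returns a target constant built from the bytes "ello"; c_readstr takes
   care of the target byte order. */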
3111 /* LEN specifies the length of the block for the memcpy/memset operation.
3112 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3113 In some cases we can make a very likely guess at the maximum size,
3114 which we then store in PROBABLE_MAX_SIZE. */
3116 static void
3117 determine_block_size (tree len, rtx len_rtx,
3118 unsigned HOST_WIDE_INT *min_size,
3119 unsigned HOST_WIDE_INT *max_size,
3120 unsigned HOST_WIDE_INT *probable_max_size)
3122 if (CONST_INT_P (len_rtx))
3124 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3125 return;
3127 else
3129 wide_int min, max;
3130 enum value_range_type range_type = VR_UNDEFINED;
3132 /* Determine bounds from the type. */
3133 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3134 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3135 else
3136 *min_size = 0;
3137 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3138 *probable_max_size = *max_size
3139 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3140 else
3141 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3143 if (TREE_CODE (len) == SSA_NAME)
3144 range_type = get_range_info (len, &min, &max);
3145 if (range_type == VR_RANGE)
3147 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3148 *min_size = min.to_uhwi ();
3149 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3150 *probable_max_size = *max_size = max.to_uhwi ();
3152 else if (range_type == VR_ANTI_RANGE)
3154 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3155 if (min == 0)
3157 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3158 *min_size = max.to_uhwi () + 1;
3160 /* Code like
3162 int n;
3163 if (n < 100)
3164 memcpy (a, b, n)
3166 produces an anti-range allowing negative values of N. We can still
3167 use this information and guess that N is not negative.
3168 */
3169 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3170 *probable_max_size = min.to_uhwi () - 1;
3173 gcc_checking_assert (*max_size <=
3174 (unsigned HOST_WIDE_INT)
3175 GET_MODE_MASK (GET_MODE (len_rtx)));
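/* Worked example (illustrative): for memcpy (a, b, n) where VRP recorded
   n in [4, 32], this sets *MIN_SIZE to 4 and *MAX_SIZE and
   *PROBABLE_MAX_SIZE to 32; an anti-range ~[0, 0] (n known nonzero)
   instead raises *MIN_SIZE to 1. */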
3178 /* Expand a call EXP to the memcpy builtin.
3179 Return NULL_RTX if we failed, the caller should emit a normal call,
3180 otherwise try to get the result in TARGET, if convenient (and in
3181 mode MODE if that's convenient). */
3183 static rtx
3184 expand_builtin_memcpy (tree exp, rtx target)
3186 if (!validate_arglist (exp,
3187 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3188 return NULL_RTX;
3189 else
3191 tree dest = CALL_EXPR_ARG (exp, 0);
3192 tree src = CALL_EXPR_ARG (exp, 1);
3193 tree len = CALL_EXPR_ARG (exp, 2);
3194 const char *src_str;
3195 unsigned int src_align = get_pointer_alignment (src);
3196 unsigned int dest_align = get_pointer_alignment (dest);
3197 rtx dest_mem, src_mem, dest_addr, len_rtx;
3198 HOST_WIDE_INT expected_size = -1;
3199 unsigned int expected_align = 0;
3200 unsigned HOST_WIDE_INT min_size;
3201 unsigned HOST_WIDE_INT max_size;
3202 unsigned HOST_WIDE_INT probable_max_size;
3204 /* If DEST is not a pointer type, call the normal function. */
3205 if (dest_align == 0)
3206 return NULL_RTX;
3208 /* If SRC is not a pointer type, don't do this
3209 operation in-line. */
3210 if (src_align == 0)
3211 return NULL_RTX;
3213 if (currently_expanding_gimple_stmt)
3214 stringop_block_profile (currently_expanding_gimple_stmt,
3215 &expected_align, &expected_size);
3217 if (expected_align < dest_align)
3218 expected_align = dest_align;
3219 dest_mem = get_memory_rtx (dest, len);
3220 set_mem_align (dest_mem, dest_align);
3221 len_rtx = expand_normal (len);
3222 determine_block_size (len, len_rtx, &min_size, &max_size,
3223 &probable_max_size);
3224 src_str = c_getstr (src);
3226 /* If SRC is a string constant and block move would be done
3227 by pieces, we can avoid loading the string from memory
3228 and instead store only the computed constants. */
3229 if (src_str
3230 && CONST_INT_P (len_rtx)
3231 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3232 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3233 CONST_CAST (char *, src_str),
3234 dest_align, false))
3236 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3237 builtin_memcpy_read_str,
3238 CONST_CAST (char *, src_str),
3239 dest_align, false, 0);
3240 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3241 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3242 return dest_mem;
3245 src_mem = get_memory_rtx (src, len);
3246 set_mem_align (src_mem, src_align);
3248 /* Copy the block of memory in the most expedient way. */
3249 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3250 CALL_EXPR_TAILCALL (exp)
3251 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3252 expected_align, expected_size,
3253 min_size, max_size, probable_max_size);
3255 if (dest_addr == 0)
3257 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3258 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3260 return dest_addr;
3264 /* Expand a call EXP to the mempcpy builtin.
3265 Return NULL_RTX if we failed; the caller should emit a normal call,
3266 otherwise try to get the result in TARGET, if convenient (and in
3267 mode MODE if that's convenient). If ENDP is 0 return the
3268 destination pointer, if ENDP is 1 return the end pointer ala
3269 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3270 stpcpy. */
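/* Concretely (illustrative): copying the string "abcd" (five bytes
   including the NUL) to DEST yields DEST for ENDP == 0, DEST + 5 for
   ENDP == 1 (mempcpy), and DEST + 4 (the address of the NUL) for
   ENDP == 2 (stpcpy). */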
3272 static rtx
3273 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3275 if (!validate_arglist (exp,
3276 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3277 return NULL_RTX;
3278 else
3280 tree dest = CALL_EXPR_ARG (exp, 0);
3281 tree src = CALL_EXPR_ARG (exp, 1);
3282 tree len = CALL_EXPR_ARG (exp, 2);
3283 return expand_builtin_mempcpy_args (dest, src, len,
3284 target, mode, /*endp=*/ 1);
3288 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3289 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3290 so that this can also be called without constructing an actual CALL_EXPR.
3291 The other arguments and return value are the same as for
3292 expand_builtin_mempcpy. */
3294 static rtx
3295 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3296 rtx target, machine_mode mode, int endp)
3298 /* If return value is ignored, transform mempcpy into memcpy. */
3299 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3301 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3302 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3303 dest, src, len);
3304 return expand_expr (result, target, mode, EXPAND_NORMAL);
3306 else
3308 const char *src_str;
3309 unsigned int src_align = get_pointer_alignment (src);
3310 unsigned int dest_align = get_pointer_alignment (dest);
3311 rtx dest_mem, src_mem, len_rtx;
3313 /* If either SRC or DEST is not a pointer type, don't do this
3314 operation in-line. */
3315 if (dest_align == 0 || src_align == 0)
3316 return NULL_RTX;
3318 /* If LEN is not constant, call the normal function. */
3319 if (! tree_fits_uhwi_p (len))
3320 return NULL_RTX;
3322 len_rtx = expand_normal (len);
3323 src_str = c_getstr (src);
3325 /* If SRC is a string constant and block move would be done
3326 by pieces, we can avoid loading the string from memory
3327 and instead store only the computed constants. */
3328 if (src_str
3329 && CONST_INT_P (len_rtx)
3330 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3331 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3332 CONST_CAST (char *, src_str),
3333 dest_align, false))
3335 dest_mem = get_memory_rtx (dest, len);
3336 set_mem_align (dest_mem, dest_align);
3337 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3338 builtin_memcpy_read_str,
3339 CONST_CAST (char *, src_str),
3340 dest_align, false, endp);
3341 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3342 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3343 return dest_mem;
3346 if (CONST_INT_P (len_rtx)
3347 && can_move_by_pieces (INTVAL (len_rtx),
3348 MIN (dest_align, src_align)))
3350 dest_mem = get_memory_rtx (dest, len);
3351 set_mem_align (dest_mem, dest_align);
3352 src_mem = get_memory_rtx (src, len);
3353 set_mem_align (src_mem, src_align);
3354 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3355 MIN (dest_align, src_align), endp);
3356 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3357 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3358 return dest_mem;
3361 return NULL_RTX;
3365 #ifndef HAVE_movstr
3366 # define HAVE_movstr 0
3367 # define CODE_FOR_movstr CODE_FOR_nothing
3368 #endif
3370 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3371 we failed, the caller should emit a normal call, otherwise try to
3372 get the result in TARGET, if convenient. If ENDP is 0 return the
3373 destination pointer, if ENDP is 1 return the end pointer ala
3374 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3375 stpcpy. */
3377 static rtx
3378 expand_movstr (tree dest, tree src, rtx target, int endp)
3380 struct expand_operand ops[3];
3381 rtx dest_mem;
3382 rtx src_mem;
3384 if (!HAVE_movstr)
3385 return NULL_RTX;
3387 dest_mem = get_memory_rtx (dest, NULL);
3388 src_mem = get_memory_rtx (src, NULL);
3389 if (!endp)
3391 target = force_reg (Pmode, XEXP (dest_mem, 0));
3392 dest_mem = replace_equiv_address (dest_mem, target);
3395 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3396 create_fixed_operand (&ops[1], dest_mem);
3397 create_fixed_operand (&ops[2], src_mem);
3398 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3399 return NULL_RTX;
3401 if (endp && target != const0_rtx)
3403 target = ops[0].value;
3404 /* movstr is supposed to set end to the address of the NUL
3405 terminator. If the caller requested a mempcpy-like return value,
3406 adjust it. */
3407 if (endp == 1)
3409 rtx tem = plus_constant (GET_MODE (target),
3410 gen_lowpart (GET_MODE (target), target), 1);
3411 emit_move_insn (target, force_operand (tem, NULL_RTX));
3414 return target;
3417 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3418 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3419 try to get the result in TARGET, if convenient (and in mode MODE if that's
3420 convenient). */
3422 static rtx
3423 expand_builtin_strcpy (tree exp, rtx target)
3425 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3427 tree dest = CALL_EXPR_ARG (exp, 0);
3428 tree src = CALL_EXPR_ARG (exp, 1);
3429 return expand_builtin_strcpy_args (dest, src, target);
3431 return NULL_RTX;
3434 /* Helper function to do the actual work for expand_builtin_strcpy. The
3435 arguments to the builtin_strcpy call DEST and SRC are broken out
3436 so that this can also be called without constructing an actual CALL_EXPR.
3437 The other arguments and return value are the same as for
3438 expand_builtin_strcpy. */
3440 static rtx
3441 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3443 return expand_movstr (dest, src, target, /*endp=*/0);
3446 /* Expand a call EXP to the stpcpy builtin.
3447 Return NULL_RTX if we failed; the caller should emit a normal call.
3448 Otherwise try to get the result in TARGET, if convenient (and in
3449 mode MODE if that's convenient). */
3451 static rtx
3452 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3454 tree dst, src;
3455 location_t loc = EXPR_LOCATION (exp);
3457 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3458 return NULL_RTX;
3460 dst = CALL_EXPR_ARG (exp, 0);
3461 src = CALL_EXPR_ARG (exp, 1);
3463 /* If the return value is ignored, transform stpcpy into strcpy.  */
3464 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3466 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3467 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3468 return expand_expr (result, target, mode, EXPAND_NORMAL);
3470 else
3472 tree len, lenp1;
3473 rtx ret;
3475 /* Ensure we get an actual string whose length can be evaluated at
3476 compile-time, not an expression containing a string. This is
3477 because the latter will potentially produce pessimized code
3478 when used to produce the return value. */
3479 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3480 return expand_movstr (dst, src, target, /*endp=*/2);
3482 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3483 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3484 target, mode, /*endp=*/2);
3486 if (ret)
3487 return ret;
3489 if (TREE_CODE (len) == INTEGER_CST)
3491 rtx len_rtx = expand_normal (len);
3493 if (CONST_INT_P (len_rtx))
3495 ret = expand_builtin_strcpy_args (dst, src, target);
3497 if (ret)
3499 if (! target)
3501 if (mode != VOIDmode)
3502 target = gen_reg_rtx (mode);
3503 else
3504 target = gen_reg_rtx (GET_MODE (ret));
3506 if (GET_MODE (target) != GET_MODE (ret))
3507 ret = gen_lowpart (GET_MODE (target), ret);
3509 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3510 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3511 gcc_assert (ret);
3513 return target;
3518 return expand_movstr (dst, src, target, /*endp=*/2);
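/* Editorial sketch (not part of GCC): when SRC is a constant string, the
   code above rewrites stpcpy as a mempcpy of strlen (src) + 1 bytes and,
   via endp == 2, points the result at the terminating NUL:

     char buf[8];
     // stpcpy (buf, "abc") behaves like:
     char *end = (char *) mempcpy (buf, "abc", 4) - 1;  // buf + 3, the NUL

   The INTEGER_CST fallback reaches the same place differently: strcpy
   returns DST, and adding INTVAL (len_rtx) == strlen (src) to it again
   yields the address of the NUL.  */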
3522 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3523 bytes from constant string DATA + OFFSET and return it as target
3524 constant. */
3526 static rtx
3527 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3528 machine_mode mode)
3530 const char *str = (const char *) data;
3532 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3533 return const0_rtx;
3535 return c_readstr (str + offset, mode);
3538 /* Expand expression EXP, which is a call to the strncpy builtin.  Return
3539 NULL_RTX if we failed; the caller should emit a normal call.  */
3541 static rtx
3542 expand_builtin_strncpy (tree exp, rtx target)
3544 location_t loc = EXPR_LOCATION (exp);
3546 if (validate_arglist (exp,
3547 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3549 tree dest = CALL_EXPR_ARG (exp, 0);
3550 tree src = CALL_EXPR_ARG (exp, 1);
3551 tree len = CALL_EXPR_ARG (exp, 2);
3552 tree slen = c_strlen (src, 1);
3554 /* We must be passed a constant len and src parameter. */
3555 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3556 return NULL_RTX;
3558 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3560 /* We're required to pad with trailing zeros if the requested
3561 len is greater than strlen(s2)+1. In that case try to
3562 use store_by_pieces; if that fails, punt.  */
3563 if (tree_int_cst_lt (slen, len))
3565 unsigned int dest_align = get_pointer_alignment (dest);
3566 const char *p = c_getstr (src);
3567 rtx dest_mem;
3569 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3570 || !can_store_by_pieces (tree_to_uhwi (len),
3571 builtin_strncpy_read_str,
3572 CONST_CAST (char *, p),
3573 dest_align, false))
3574 return NULL_RTX;
3576 dest_mem = get_memory_rtx (dest, len);
3577 store_by_pieces (dest_mem, tree_to_uhwi (len),
3578 builtin_strncpy_read_str,
3579 CONST_CAST (char *, p), dest_align, false, 0);
3580 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3581 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3582 return dest_mem;
3585 return NULL_RTX;
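/* Editorial sketch (not part of GCC): the padding rule handled above is
   the ISO C strncpy semantics.  For a constant source shorter than LEN,
   every remaining byte must be written as zero:

     char buf[8];
     strncpy (buf, "ab", 8);   // writes 'a', 'b', then six '\0' bytes

   builtin_strncpy_read_str implements this for store_by_pieces by
   returning const0_rtx once OFFSET passes strlen (src), so the stores
   beyond the string are all-zero words.  */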
3588 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3589 bytes from constant string DATA + OFFSET and return it as target
3590 constant. */
3592 static rtx
3593 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3594 machine_mode mode)
3596 const char *c = (const char *) data;
3597 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3599 memset (p, *c, GET_MODE_SIZE (mode));
3601 return c_readstr (p, mode);
3604 /* Callback routine for store_by_pieces. Return the RTL of a register
3605 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3606 char value given in the RTL register data. For example, if mode is
3607 4 bytes wide, return the RTL for 0x01010101*data. */
3609 static rtx
3610 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3611 machine_mode mode)
3613 rtx target, coeff;
3614 size_t size;
3615 char *p;
3617 size = GET_MODE_SIZE (mode);
3618 if (size == 1)
3619 return (rtx) data;
3621 p = XALLOCAVEC (char, size);
3622 memset (p, 1, size);
3623 coeff = c_readstr (p, mode);
3625 target = convert_to_mode (mode, (rtx) data, 1);
3626 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3627 return force_reg (mode, target);
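/* Editorial sketch (not part of GCC): the multiplication trick used by
   builtin_memset_gen_str, shown at the source level for a 4-byte mode.
   Multiplying an unsigned byte by 0x01010101 replicates it into every
   byte lane:

     #include <stdint.h>
     uint32_t splat4 (uint8_t c)
     {
       return (uint32_t) c * 0x01010101u;   // 0xAB -> 0xABABABAB
     }

   c_readstr on a buffer of 0x01 bytes builds the 0x0101... coefficient
   for whatever mode size is requested.  */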
3630 /* Expand expression EXP, which is a call to the memset builtin.  Return
3631 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
3632 try to get the result in TARGET, if convenient (and in mode MODE if that's
3633 convenient).  */
3635 static rtx
3636 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3638 if (!validate_arglist (exp,
3639 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3640 return NULL_RTX;
3641 else
3643 tree dest = CALL_EXPR_ARG (exp, 0);
3644 tree val = CALL_EXPR_ARG (exp, 1);
3645 tree len = CALL_EXPR_ARG (exp, 2);
3646 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3650 /* Helper function to do the actual work for expand_builtin_memset. The
3651 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3652 so that this can also be called without constructing an actual CALL_EXPR.
3653 The other arguments and return value are the same as for
3654 expand_builtin_memset. */
3656 static rtx
3657 expand_builtin_memset_args (tree dest, tree val, tree len,
3658 rtx target, machine_mode mode, tree orig_exp)
3660 tree fndecl, fn;
3661 enum built_in_function fcode;
3662 machine_mode val_mode;
3663 char c;
3664 unsigned int dest_align;
3665 rtx dest_mem, dest_addr, len_rtx;
3666 HOST_WIDE_INT expected_size = -1;
3667 unsigned int expected_align = 0;
3668 unsigned HOST_WIDE_INT min_size;
3669 unsigned HOST_WIDE_INT max_size;
3670 unsigned HOST_WIDE_INT probable_max_size;
3672 dest_align = get_pointer_alignment (dest);
3674 /* If DEST is not a pointer type, don't do this operation in-line. */
3675 if (dest_align == 0)
3676 return NULL_RTX;
3678 if (currently_expanding_gimple_stmt)
3679 stringop_block_profile (currently_expanding_gimple_stmt,
3680 &expected_align, &expected_size);
3682 if (expected_align < dest_align)
3683 expected_align = dest_align;
3685 /* If the LEN parameter is zero, return DEST. */
3686 if (integer_zerop (len))
3688 /* Evaluate and ignore VAL in case it has side-effects. */
3689 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3690 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3693 /* Stabilize the arguments in case we fail. */
3694 dest = builtin_save_expr (dest);
3695 val = builtin_save_expr (val);
3696 len = builtin_save_expr (len);
3698 len_rtx = expand_normal (len);
3699 determine_block_size (len, len_rtx, &min_size, &max_size,
3700 &probable_max_size);
3701 dest_mem = get_memory_rtx (dest, len);
3702 val_mode = TYPE_MODE (unsigned_char_type_node);
3704 if (TREE_CODE (val) != INTEGER_CST)
3706 rtx val_rtx;
3708 val_rtx = expand_normal (val);
3709 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3711 /* Assume that we can memset by pieces if we can store
3712 the coefficients by pieces (in the required modes).
3713 We can't pass builtin_memset_gen_str as that emits RTL.  */
3714 c = 1;
3715 if (tree_fits_uhwi_p (len)
3716 && can_store_by_pieces (tree_to_uhwi (len),
3717 builtin_memset_read_str, &c, dest_align,
3718 true))
3720 val_rtx = force_reg (val_mode, val_rtx);
3721 store_by_pieces (dest_mem, tree_to_uhwi (len),
3722 builtin_memset_gen_str, val_rtx, dest_align,
3723 true, 0);
3725 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3726 dest_align, expected_align,
3727 expected_size, min_size, max_size,
3728 probable_max_size))
3729 goto do_libcall;
3731 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3732 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3733 return dest_mem;
3736 if (target_char_cast (val, &c))
3737 goto do_libcall;
3739 if (c)
3741 if (tree_fits_uhwi_p (len)
3742 && can_store_by_pieces (tree_to_uhwi (len),
3743 builtin_memset_read_str, &c, dest_align,
3744 true))
3745 store_by_pieces (dest_mem, tree_to_uhwi (len),
3746 builtin_memset_read_str, &c, dest_align, true, 0);
3747 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3748 gen_int_mode (c, val_mode),
3749 dest_align, expected_align,
3750 expected_size, min_size, max_size,
3751 probable_max_size))
3752 goto do_libcall;
3754 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3755 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3756 return dest_mem;
3759 set_mem_align (dest_mem, dest_align);
3760 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3761 CALL_EXPR_TAILCALL (orig_exp)
3762 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3763 expected_align, expected_size,
3764 min_size, max_size,
3765 probable_max_size);
3767 if (dest_addr == 0)
3769 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3770 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3773 return dest_addr;
3775 do_libcall:
3776 fndecl = get_callee_fndecl (orig_exp);
3777 fcode = DECL_FUNCTION_CODE (fndecl);
3778 if (fcode == BUILT_IN_MEMSET)
3779 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3780 dest, val, len);
3781 else if (fcode == BUILT_IN_BZERO)
3782 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3783 dest, len);
3784 else
3785 gcc_unreachable ();
3786 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3787 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3788 return expand_call (fn, target, target == const0_rtx);
3791 /* Expand expression EXP, which is a call to the bzero builtin.  Return
3792 NULL_RTX if we failed; the caller should emit a normal call.  */
3794 static rtx
3795 expand_builtin_bzero (tree exp)
3797 tree dest, size;
3798 location_t loc = EXPR_LOCATION (exp);
3800 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3801 return NULL_RTX;
3803 dest = CALL_EXPR_ARG (exp, 0);
3804 size = CALL_EXPR_ARG (exp, 1);
3806 /* New argument list transforming bzero(ptr x, int y) to
3807 memset(ptr x, int 0, size_t y). This is done this way
3808 so that if it isn't expanded inline, we fall back to
3809 calling bzero instead of memset. */
3811 return expand_builtin_memset_args (dest, integer_zero_node,
3812 fold_convert_loc (loc,
3813 size_type_node, size),
3814 const0_rtx, VOIDmode, exp);
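/* Editorial sketch (not part of GCC): the argument rewriting above is
   simply

     bzero (p, n)   ==>   memset (p, 0, (size_t) n)

   but, because EXP (the original bzero call) is passed through as
   ORIG_EXP, the do_libcall path in expand_builtin_memset_args rebuilds
   a call to bzero, not memset, when inline expansion fails.  */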
3817 /* Expand expression EXP, which is a call to the memcmp built-in function.
3818 Return NULL_RTX if we failed and the caller should emit a normal call,
3819 otherwise try to get the result in TARGET, if convenient (and in mode
3820 MODE, if that's convenient). */
3822 static rtx
3823 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3824 ATTRIBUTE_UNUSED machine_mode mode)
3826 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3828 if (!validate_arglist (exp,
3829 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3830 return NULL_RTX;
3832 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3833 implementing memcmp because it will stop if it encounters two
3834 zero bytes. */
3835 #if defined HAVE_cmpmemsi
3837 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3838 rtx result;
3839 rtx insn;
3840 tree arg1 = CALL_EXPR_ARG (exp, 0);
3841 tree arg2 = CALL_EXPR_ARG (exp, 1);
3842 tree len = CALL_EXPR_ARG (exp, 2);
3844 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3845 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3846 machine_mode insn_mode;
3848 if (HAVE_cmpmemsi)
3849 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3850 else
3851 return NULL_RTX;
3853 /* If we don't have POINTER_TYPE, call the function. */
3854 if (arg1_align == 0 || arg2_align == 0)
3855 return NULL_RTX;
3857 /* Make a place to write the result of the instruction. */
3858 result = target;
3859 if (! (result != 0
3860 && REG_P (result) && GET_MODE (result) == insn_mode
3861 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3862 result = gen_reg_rtx (insn_mode);
3864 arg1_rtx = get_memory_rtx (arg1, len);
3865 arg2_rtx = get_memory_rtx (arg2, len);
3866 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3868 /* Set MEM_SIZE as appropriate. */
3869 if (CONST_INT_P (arg3_rtx))
3871 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3872 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3875 if (HAVE_cmpmemsi)
3876 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3877 GEN_INT (MIN (arg1_align, arg2_align)));
3878 else
3879 gcc_unreachable ();
3881 if (insn)
3882 emit_insn (insn);
3883 else
3884 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3885 TYPE_MODE (integer_type_node), 3,
3886 XEXP (arg1_rtx, 0), Pmode,
3887 XEXP (arg2_rtx, 0), Pmode,
3888 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3889 TYPE_UNSIGNED (sizetype)),
3890 TYPE_MODE (sizetype));
3892 /* Return the value in the proper mode for this function. */
3893 mode = TYPE_MODE (TREE_TYPE (exp));
3894 if (GET_MODE (result) == mode)
3895 return result;
3896 else if (target != 0)
3898 convert_move (target, result, 0);
3899 return target;
3901 else
3902 return convert_to_mode (mode, result, 0);
3904 #endif /* HAVE_cmpmemsi. */
3906 return NULL_RTX;
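/* Editorial sketch (not part of GCC): why cmpstrnsi cannot implement
   memcmp.  memcmp must compare exactly N bytes, including bytes after
   embedded NULs:

     memcmp ("a\0x", "a\0y", 3)   // nonzero: the third bytes differ
     strcmp ("a\0x", "a\0y")      // 0: the comparison stops at the NUL

   A string-compare pattern that stops at matching zero bytes would
   wrongly report the memcmp case as equal, hence only cmpmemsi is
   usable here.  */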
3909 /* Expand expression EXP, which is a call to the strcmp builtin.  Return
3910 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
3911 try to get the result in TARGET, if convenient.  */
3913 static rtx
3914 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3916 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3917 return NULL_RTX;
3919 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3920 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3921 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3923 rtx arg1_rtx, arg2_rtx;
3924 rtx result, insn = NULL_RTX;
3925 tree fndecl, fn;
3926 tree arg1 = CALL_EXPR_ARG (exp, 0);
3927 tree arg2 = CALL_EXPR_ARG (exp, 1);
3929 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3930 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3932 /* If we don't have POINTER_TYPE, call the function. */
3933 if (arg1_align == 0 || arg2_align == 0)
3934 return NULL_RTX;
3936 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3937 arg1 = builtin_save_expr (arg1);
3938 arg2 = builtin_save_expr (arg2);
3940 arg1_rtx = get_memory_rtx (arg1, NULL);
3941 arg2_rtx = get_memory_rtx (arg2, NULL);
3943 #ifdef HAVE_cmpstrsi
3944 /* Try to call cmpstrsi. */
3945 if (HAVE_cmpstrsi)
3947 machine_mode insn_mode
3948 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3950 /* Make a place to write the result of the instruction. */
3951 result = target;
3952 if (! (result != 0
3953 && REG_P (result) && GET_MODE (result) == insn_mode
3954 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3955 result = gen_reg_rtx (insn_mode);
3957 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3958 GEN_INT (MIN (arg1_align, arg2_align)));
3960 #endif
3961 #ifdef HAVE_cmpstrnsi
3962 /* Try to determine at least one length and call cmpstrnsi. */
3963 if (!insn && HAVE_cmpstrnsi)
3965 tree len;
3966 rtx arg3_rtx;
3968 machine_mode insn_mode
3969 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3970 tree len1 = c_strlen (arg1, 1);
3971 tree len2 = c_strlen (arg2, 1);
3973 if (len1)
3974 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3975 if (len2)
3976 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3978 /* If we don't have a constant length for the first, use the length
3979 of the second, if we know it. We don't require a constant for
3980 this case; some cost analysis could be done if both are available
3981 but neither is constant. For now, assume they're equally cheap,
3982 unless one has side effects. If both strings have constant lengths,
3983 use the smaller. */
3985 if (!len1)
3986 len = len2;
3987 else if (!len2)
3988 len = len1;
3989 else if (TREE_SIDE_EFFECTS (len1))
3990 len = len2;
3991 else if (TREE_SIDE_EFFECTS (len2))
3992 len = len1;
3993 else if (TREE_CODE (len1) != INTEGER_CST)
3994 len = len2;
3995 else if (TREE_CODE (len2) != INTEGER_CST)
3996 len = len1;
3997 else if (tree_int_cst_lt (len1, len2))
3998 len = len1;
3999 else
4000 len = len2;
4002 /* If both arguments have side effects, we cannot optimize. */
4003 if (!len || TREE_SIDE_EFFECTS (len))
4004 goto do_libcall;
4006 arg3_rtx = expand_normal (len);
4008 /* Make a place to write the result of the instruction. */
4009 result = target;
4010 if (! (result != 0
4011 && REG_P (result) && GET_MODE (result) == insn_mode
4012 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4013 result = gen_reg_rtx (insn_mode);
4015 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4016 GEN_INT (MIN (arg1_align, arg2_align)));
4018 #endif
4020 if (insn)
4022 machine_mode mode;
4023 emit_insn (insn);
4025 /* Return the value in the proper mode for this function. */
4026 mode = TYPE_MODE (TREE_TYPE (exp));
4027 if (GET_MODE (result) == mode)
4028 return result;
4029 if (target == 0)
4030 return convert_to_mode (mode, result, 0);
4031 convert_move (target, result, 0);
4032 return target;
4035 /* Expand the library call ourselves using a stabilized argument
4036 list to avoid evaluating the function's arguments twice.  */
4037 #ifdef HAVE_cmpstrnsi
4038 do_libcall:
4039 #endif
4040 fndecl = get_callee_fndecl (exp);
4041 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4042 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4043 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4044 return expand_call (fn, target, target == const0_rtx);
4046 #endif
4047 return NULL_RTX;
4050 /* Expand expression EXP, which is a call to the strncmp builtin.  Return
4051 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
4052 try to get the result in TARGET, if convenient.  */
4054 static rtx
4055 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4056 ATTRIBUTE_UNUSED machine_mode mode)
4058 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4060 if (!validate_arglist (exp,
4061 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4062 return NULL_RTX;
4064 /* If c_strlen can determine an expression for one of the string
4065 lengths, and it doesn't have side effects, then emit cmpstrnsi
4066 using length MIN(strlen(string)+1, arg3). */
4067 #ifdef HAVE_cmpstrnsi
4068 if (HAVE_cmpstrnsi)
4070 tree len, len1, len2;
4071 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4072 rtx result, insn;
4073 tree fndecl, fn;
4074 tree arg1 = CALL_EXPR_ARG (exp, 0);
4075 tree arg2 = CALL_EXPR_ARG (exp, 1);
4076 tree arg3 = CALL_EXPR_ARG (exp, 2);
4078 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4079 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4080 machine_mode insn_mode
4081 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4083 len1 = c_strlen (arg1, 1);
4084 len2 = c_strlen (arg2, 1);
4086 if (len1)
4087 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4088 if (len2)
4089 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4091 /* If we don't have a constant length for the first, use the length
4092 of the second, if we know it. We don't require a constant for
4093 this case; some cost analysis could be done if both are available
4094 but neither is constant. For now, assume they're equally cheap,
4095 unless one has side effects. If both strings have constant lengths,
4096 use the smaller. */
4098 if (!len1)
4099 len = len2;
4100 else if (!len2)
4101 len = len1;
4102 else if (TREE_SIDE_EFFECTS (len1))
4103 len = len2;
4104 else if (TREE_SIDE_EFFECTS (len2))
4105 len = len1;
4106 else if (TREE_CODE (len1) != INTEGER_CST)
4107 len = len2;
4108 else if (TREE_CODE (len2) != INTEGER_CST)
4109 len = len1;
4110 else if (tree_int_cst_lt (len1, len2))
4111 len = len1;
4112 else
4113 len = len2;
4115 /* If both arguments have side effects, we cannot optimize. */
4116 if (!len || TREE_SIDE_EFFECTS (len))
4117 return NULL_RTX;
4119 /* The actual new length parameter is MIN(len,arg3). */
4120 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4121 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4123 /* If we don't have POINTER_TYPE, call the function. */
4124 if (arg1_align == 0 || arg2_align == 0)
4125 return NULL_RTX;
4127 /* Make a place to write the result of the instruction. */
4128 result = target;
4129 if (! (result != 0
4130 && REG_P (result) && GET_MODE (result) == insn_mode
4131 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4132 result = gen_reg_rtx (insn_mode);
4134 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4135 arg1 = builtin_save_expr (arg1);
4136 arg2 = builtin_save_expr (arg2);
4137 len = builtin_save_expr (len);
4139 arg1_rtx = get_memory_rtx (arg1, len);
4140 arg2_rtx = get_memory_rtx (arg2, len);
4141 arg3_rtx = expand_normal (len);
4142 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4143 GEN_INT (MIN (arg1_align, arg2_align)));
4144 if (insn)
4146 emit_insn (insn);
4148 /* Return the value in the proper mode for this function. */
4149 mode = TYPE_MODE (TREE_TYPE (exp));
4150 if (GET_MODE (result) == mode)
4151 return result;
4152 if (target == 0)
4153 return convert_to_mode (mode, result, 0);
4154 convert_move (target, result, 0);
4155 return target;
4158 /* Expand the library call ourselves using a stabilized argument
4159 list to avoid evaluating the function's arguments twice.  */
4160 fndecl = get_callee_fndecl (exp);
4161 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4162 arg1, arg2, len);
4163 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4164 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4165 return expand_call (fn, target, target == const0_rtx);
4167 #endif
4168 return NULL_RTX;
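/* Editorial sketch (not part of GCC): the length fed to cmpstrnsi above
   is MIN (strlen (si) + 1, arg3) for whichever string length is known.
   For example:

     strncmp (s, "hi", 10)   // compare length becomes MIN (3, 10) = 3

   Comparing past the NUL of the known string can never change the
   result, so clamping to strlen + 1 is safe, and clamping to ARG3 is
   required by the strncmp contract.  */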
4171 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4172 if that's convenient. */
4174 rtx
4175 expand_builtin_saveregs (void)
4177 rtx val;
4178 rtx_insn *seq;
4180 /* Don't do __builtin_saveregs more than once in a function.
4181 Save the result of the first call and reuse it. */
4182 if (saveregs_value != 0)
4183 return saveregs_value;
4185 /* When this function is called, it means that registers must be
4186 saved on entry to this function. So we migrate the call to the
4187 first insn of this function. */
4189 start_sequence ();
4191 /* Do whatever the machine needs done in this case. */
4192 val = targetm.calls.expand_builtin_saveregs ();
4194 seq = get_insns ();
4195 end_sequence ();
4197 saveregs_value = val;
4199 /* Put the insns after the NOTE that starts the function. If this
4200 is inside a start_sequence, make the outer-level insn chain current, so
4201 the code is placed at the start of the function. */
4202 push_topmost_sequence ();
4203 emit_insn_after (seq, entry_of_function ());
4204 pop_topmost_sequence ();
4206 return val;
4209 /* Expand a call to __builtin_next_arg. */
4211 static rtx
4212 expand_builtin_next_arg (void)
4214 /* Checking arguments is already done in fold_builtin_next_arg,
4215 which must be called before this function.  */
4216 return expand_binop (ptr_mode, add_optab,
4217 crtl->args.internal_arg_pointer,
4218 crtl->args.arg_offset_rtx,
4219 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4222 /* Make it easier for the backends by protecting the valist argument
4223 from multiple evaluations. */
4225 static tree
4226 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4228 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4230 /* The current way of determining the type of valist is completely
4231 bogus. We should have the information on the va builtin instead. */
4232 if (!vatype)
4233 vatype = targetm.fn_abi_va_list (cfun->decl);
4235 if (TREE_CODE (vatype) == ARRAY_TYPE)
4237 if (TREE_SIDE_EFFECTS (valist))
4238 valist = save_expr (valist);
4240 /* For this case, the backends will be expecting a pointer to
4241 vatype, but it's possible we've actually been given an array
4242 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4243 So fix it. */
4244 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4246 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4247 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4250 else
4252 tree pt = build_pointer_type (vatype);
4254 if (! needs_lvalue)
4256 if (! TREE_SIDE_EFFECTS (valist))
4257 return valist;
4259 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4260 TREE_SIDE_EFFECTS (valist) = 1;
4263 if (TREE_SIDE_EFFECTS (valist))
4264 valist = save_expr (valist);
4265 valist = fold_build2_loc (loc, MEM_REF,
4266 vatype, valist, build_int_cst (pt, 0));
4269 return valist;
4272 /* The "standard" definition of va_list is void*. */
4274 tree
4275 std_build_builtin_va_list (void)
4277 return ptr_type_node;
4280 /* The "standard" abi va_list is va_list_type_node. */
4282 tree
4283 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4285 return va_list_type_node;
4288 /* The "standard" type of va_list is va_list_type_node. */
4290 tree
4291 std_canonical_va_list_type (tree type)
4293 tree wtype, htype;
4295 if (INDIRECT_REF_P (type))
4296 type = TREE_TYPE (type);
4297 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4298 type = TREE_TYPE (type);
4299 wtype = va_list_type_node;
4300 htype = type;
4301 /* Treat structure va_list types. */
4302 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4303 htype = TREE_TYPE (htype);
4304 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4306 /* If va_list is an array type, the argument may have decayed
4307 to a pointer type, e.g. by being passed to another function.
4308 In that case, unwrap both types so that we can compare the
4309 underlying records. */
4310 if (TREE_CODE (htype) == ARRAY_TYPE
4311 || POINTER_TYPE_P (htype))
4313 wtype = TREE_TYPE (wtype);
4314 htype = TREE_TYPE (htype);
4317 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4318 return va_list_type_node;
4320 return NULL_TREE;
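/* Editorial sketch (not part of GCC): the array unwrapping above matters
   on targets whose ABI va_list is an array, e.g. the x86-64 System V ABI,
   where va_list is (conceptually) declared as

     typedef struct __va_list_tag __builtin_va_list[1];

   An object declared va_list has array type, but once it is passed to
   another function it decays to a pointer to the record, so both forms
   must canonicalize to the same va_list type.  */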
4323 /* The "standard" implementation of va_start: just assign `nextarg' to
4324 the variable. */
4326 void
4327 std_expand_builtin_va_start (tree valist, rtx nextarg)
4329 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4330 convert_move (va_r, nextarg, 0);
4332 /* We do not have any valid bounds for the pointer, so
4333 just store zero bounds for it. */
4334 if (chkp_function_instrumented_p (current_function_decl))
4335 chkp_expand_bounds_reset_for_mem (valist,
4336 make_tree (TREE_TYPE (valist),
4337 nextarg));
4340 /* Expand EXP, a call to __builtin_va_start. */
4342 static rtx
4343 expand_builtin_va_start (tree exp)
4345 rtx nextarg;
4346 tree valist;
4347 location_t loc = EXPR_LOCATION (exp);
4349 if (call_expr_nargs (exp) < 2)
4351 error_at (loc, "too few arguments to function %<va_start%>");
4352 return const0_rtx;
4355 if (fold_builtin_next_arg (exp, true))
4356 return const0_rtx;
4358 nextarg = expand_builtin_next_arg ();
4359 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4361 if (targetm.expand_builtin_va_start)
4362 targetm.expand_builtin_va_start (valist, nextarg);
4363 else
4364 std_expand_builtin_va_start (valist, nextarg);
4366 return const0_rtx;
4369 /* Expand EXP, a call to __builtin_va_end. */
4371 static rtx
4372 expand_builtin_va_end (tree exp)
4374 tree valist = CALL_EXPR_ARG (exp, 0);
4376 /* Evaluate for side effects, if needed. I hate macros that don't
4377 do that. */
4378 if (TREE_SIDE_EFFECTS (valist))
4379 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4381 return const0_rtx;
4384 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4385 builtin rather than just as an assignment in stdarg.h because of the
4386 nastiness of array-type va_list types. */
4388 static rtx
4389 expand_builtin_va_copy (tree exp)
4391 tree dst, src, t;
4392 location_t loc = EXPR_LOCATION (exp);
4394 dst = CALL_EXPR_ARG (exp, 0);
4395 src = CALL_EXPR_ARG (exp, 1);
4397 dst = stabilize_va_list_loc (loc, dst, 1);
4398 src = stabilize_va_list_loc (loc, src, 0);
4400 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4402 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4404 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4405 TREE_SIDE_EFFECTS (t) = 1;
4406 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4408 else
4410 rtx dstb, srcb, size;
4412 /* Evaluate to pointers. */
4413 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4414 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4415 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4416 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4418 dstb = convert_memory_address (Pmode, dstb);
4419 srcb = convert_memory_address (Pmode, srcb);
4421 /* "Dereference" to BLKmode memories. */
4422 dstb = gen_rtx_MEM (BLKmode, dstb);
4423 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4424 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4425 srcb = gen_rtx_MEM (BLKmode, srcb);
4426 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4427 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4429 /* Copy. */
4430 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4433 return const0_rtx;
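/* Editorial sketch (not part of GCC): why va_copy must be a builtin
   rather than an assignment in stdarg.h.  With an array-type va_list,
   plain assignment is not even valid C:

     va_list a, b;
     b = a;           // invalid when va_list is an array type
     va_copy (b, a);  // works: expands to the block copy above

   For scalar va_list types the MODIFY_EXPR branch is used instead and
   the copy really is a simple assignment.  */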
4436 /* Expand a call to one of the builtin functions __builtin_frame_address or
4437 __builtin_return_address. */
4439 static rtx
4440 expand_builtin_frame_address (tree fndecl, tree exp)
4442 /* The argument must be a nonnegative integer constant.
4443 It counts the number of frames to scan up the stack.
4444 The value is the frame address or return address saved in that frame.  */
4445 if (call_expr_nargs (exp) == 0)
4446 /* Warning about missing arg was already issued. */
4447 return const0_rtx;
4448 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4450 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4451 error ("invalid argument to %<__builtin_frame_address%>");
4452 else
4453 error ("invalid argument to %<__builtin_return_address%>");
4454 return const0_rtx;
4456 else
4458 rtx tem
4459 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4460 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4462 /* Some ports cannot access arbitrary stack frames. */
4463 if (tem == NULL)
4465 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4466 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4467 else
4468 warning (0, "unsupported argument to %<__builtin_return_address%>");
4469 return const0_rtx;
4472 /* For __builtin_frame_address, return what we've got. */
4473 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4474 return tem;
4476 if (!REG_P (tem)
4477 && ! CONSTANT_P (tem))
4478 tem = copy_addr_to_reg (tem);
4479 return tem;
4483 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4484 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4485 is the same as for allocate_dynamic_stack_space. */
4487 static rtx
4488 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4490 rtx op0;
4491 rtx result;
4492 bool valid_arglist;
4493 unsigned int align;
4494 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4495 == BUILT_IN_ALLOCA_WITH_ALIGN);
4497 valid_arglist
4498 = (alloca_with_align
4499 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4500 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4502 if (!valid_arglist)
4503 return NULL_RTX;
4505 /* Compute the argument. */
4506 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4508 /* Compute the alignment. */
4509 align = (alloca_with_align
4510 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4511 : BIGGEST_ALIGNMENT);
4513 /* Allocate the desired space. */
4514 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4515 result = convert_memory_address (ptr_mode, result);
4517 return result;
4520 /* Expand a call to bswap builtin in EXP.
4521 Return NULL_RTX if a normal call should be emitted rather than expanding the
4522 function in-line. If convenient, the result should be placed in TARGET.
4523 SUBTARGET may be used as the target for computing one of EXP's operands. */
4525 static rtx
4526 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4527 rtx subtarget)
4529 tree arg;
4530 rtx op0;
4532 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4533 return NULL_RTX;
4535 arg = CALL_EXPR_ARG (exp, 0);
4536 op0 = expand_expr (arg,
4537 subtarget && GET_MODE (subtarget) == target_mode
4538 ? subtarget : NULL_RTX,
4539 target_mode, EXPAND_NORMAL);
4540 if (GET_MODE (op0) != target_mode)
4541 op0 = convert_to_mode (target_mode, op0, 1);
4543 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4545 gcc_assert (target);
4547 return convert_to_mode (target_mode, target, 1);
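/* Editorial sketch (not part of GCC): the byte reversal performed by the
   bswap builtins expanded above:

     __builtin_bswap32 (0x11223344)  == 0x44332211
     __builtin_bswap16 (0x1122)      == 0x2211

   The argument is converted to TARGET_MODE first, so that e.g. a
   promoted int argument to __builtin_bswap16 is narrowed before the
   optab runs.  */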
4550 /* Expand a call to a unary builtin in EXP.
4551 Return NULL_RTX if a normal call should be emitted rather than expanding the
4552 function in-line. If convenient, the result should be placed in TARGET.
4553 SUBTARGET may be used as the target for computing one of EXP's operands. */
4555 static rtx
4556 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4557 rtx subtarget, optab op_optab)
4559 rtx op0;
4561 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4562 return NULL_RTX;
4564 /* Compute the argument. */
4565 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4566 (subtarget
4567 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4568 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4569 VOIDmode, EXPAND_NORMAL);
4570 /* Compute op, into TARGET if possible.
4571 Set TARGET to wherever the result comes back. */
4572 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4573 op_optab, op0, target, op_optab != clrsb_optab);
4574 gcc_assert (target);
4576 return convert_to_mode (target_mode, target, 0);
4579 /* Expand a call to __builtin_expect.  We just return our argument,
4580 as the builtin_expect semantics should have already been applied by
4581 the tree branch prediction pass.  */
4583 static rtx
4584 expand_builtin_expect (tree exp, rtx target)
4586 tree arg;
4588 if (call_expr_nargs (exp) < 2)
4589 return const0_rtx;
4590 arg = CALL_EXPR_ARG (exp, 0);
4592 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4593 /* When guessing was done, the hints should be already stripped away. */
4594 gcc_assert (!flag_guess_branch_prob
4595 || optimize == 0 || seen_error ());
4596 return target;
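/* Editorial sketch (not part of GCC): by expansion time the hint has
   already been consumed, so the builtin is just its first argument:

     if (__builtin_expect (x != 0, 0))   // value is (x != 0); the
       rare_path ();                     // probability was recorded by
                                         // the branch prediction pass

   This is why only the first argument is expanded here.  */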
4599 /* Expand a call to __builtin_assume_aligned.  We just return our first
4600 argument, as the builtin_assume_aligned semantics should have already
4601 been applied by CCP.  */
4603 static rtx
4604 expand_builtin_assume_aligned (tree exp, rtx target)
4606 if (call_expr_nargs (exp) < 2)
4607 return const0_rtx;
4608 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4609 EXPAND_NORMAL);
4610 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4611 && (call_expr_nargs (exp) < 3
4612 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4613 return target;
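/* Editorial sketch (not part of GCC): as with __builtin_expect, the
   alignment fact was recorded earlier (by CCP), so only the pointer
   argument survives to expansion:

     void *q = __builtin_assume_aligned (p, 16);  // q == p at run time

   The assert above merely checks that the discarded alignment (and
   misalignment) arguments have no side effects to lose.  */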
4616 void
4617 expand_builtin_trap (void)
4619 #ifdef HAVE_trap
4620 if (HAVE_trap)
4622 rtx insn = emit_insn (gen_trap ());
4623 /* For trap insns when not accumulating outgoing args force
4624 REG_ARGS_SIZE note to prevent crossjumping of calls with
4625 different args sizes. */
4626 if (!ACCUMULATE_OUTGOING_ARGS)
4627 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4629 else
4630 #endif
4631 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4632 emit_barrier ();
4635 /* Expand a call to __builtin_unreachable. We do nothing except emit
4636 a barrier saying that control flow will not pass here.
4638 It is the responsibility of the program being compiled to ensure
4639 that control flow never reaches __builtin_unreachable.  */
4640 static void
4641 expand_builtin_unreachable (void)
4643 emit_barrier ();
4646 /* Expand EXP, a call to fabs, fabsf or fabsl.
4647 Return NULL_RTX if a normal call should be emitted rather than expanding
4648 the function inline. If convenient, the result should be placed
4649 in TARGET. SUBTARGET may be used as the target for computing
4650 the operand. */
4652 static rtx
4653 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4655 machine_mode mode;
4656 tree arg;
4657 rtx op0;
4659 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4660 return NULL_RTX;
4662 arg = CALL_EXPR_ARG (exp, 0);
4663 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4664 mode = TYPE_MODE (TREE_TYPE (arg));
4665 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4666 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4669 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4670 Return NULL if a normal call should be emitted rather than expanding the
4671 function inline. If convenient, the result should be placed in TARGET.
4672 SUBTARGET may be used as the target for computing the operand. */
4674 static rtx
4675 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4677 rtx op0, op1;
4678 tree arg;
4680 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4681 return NULL_RTX;
4683 arg = CALL_EXPR_ARG (exp, 0);
4684 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4686 arg = CALL_EXPR_ARG (exp, 1);
4687 op1 = expand_normal (arg);
4689 return expand_copysign (op0, op1, target);
4692 /* Expand a call to __builtin___clear_cache. */
4694 static rtx
4695 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4697 #ifndef HAVE_clear_cache
4698 #ifdef CLEAR_INSN_CACHE
4699 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4700 does something. Just do the default expansion to a call to
4701 __clear_cache(). */
4702 return NULL_RTX;
4703 #else
4704 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4705 does nothing. There is no need to call it. Do nothing. */
4706 return const0_rtx;
4707 #endif /* CLEAR_INSN_CACHE */
4708 #else
4709 /* We have a "clear_cache" insn, and it will handle everything. */
4710 tree begin, end;
4711 rtx begin_rtx, end_rtx;
4713 /* We must not expand to a library call. If we did, any
4714 fallback library function in libgcc that might contain a call to
4715 __builtin___clear_cache() would recurse infinitely. */
4716 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4718 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4719 return const0_rtx;
4722 if (HAVE_clear_cache)
4724 struct expand_operand ops[2];
4726 begin = CALL_EXPR_ARG (exp, 0);
4727 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4729 end = CALL_EXPR_ARG (exp, 1);
4730 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4732 create_address_operand (&ops[0], begin_rtx);
4733 create_address_operand (&ops[1], end_rtx);
4734 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4735 return const0_rtx;
4737 return const0_rtx;
4738 #endif /* HAVE_clear_cache */
4741 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4743 static rtx
4744 round_trampoline_addr (rtx tramp)
4746 rtx temp, addend, mask;
4748 /* If we don't need too much alignment, we'll have been guaranteed
4749 proper alignment by get_trampoline_type. */
4750 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4751 return tramp;
4753 /* Round address up to desired boundary. */
4754 temp = gen_reg_rtx (Pmode);
4755 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4756 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4758 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4759 temp, 0, OPTAB_LIB_WIDEN);
4760 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4761 temp, 0, OPTAB_LIB_WIDEN);
4763 return tramp;
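/* Editorial sketch (not part of GCC): the PLUS/AND pair above is the
   usual round-up-to-alignment idiom, (addr + align - 1) & -align.
   For a TRAMPOLINE_ALIGNMENT of 128 bits (16 bytes):

     0x1003 + 15 == 0x1012;  0x1012 & ~15 == 0x1010

   so any address in [0x1001, 0x1010] rounds up to 0x1010.  */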
4766 static rtx
4767 expand_builtin_init_trampoline (tree exp, bool onstack)
4769 tree t_tramp, t_func, t_chain;
4770 rtx m_tramp, r_tramp, r_chain, tmp;
4772 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4773 POINTER_TYPE, VOID_TYPE))
4774 return NULL_RTX;
4776 t_tramp = CALL_EXPR_ARG (exp, 0);
4777 t_func = CALL_EXPR_ARG (exp, 1);
4778 t_chain = CALL_EXPR_ARG (exp, 2);
4780 r_tramp = expand_normal (t_tramp);
4781 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4782 MEM_NOTRAP_P (m_tramp) = 1;
4784 /* If ONSTACK, the TRAMP argument should be the address of a field
4785 within the local function's FRAME decl. Either way, let's see if
4786 we can fill in the MEM_ATTRs for this memory. */
4787 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4788 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4790 /* Creator of a heap trampoline is responsible for making sure the
4791 address is aligned to at least STACK_BOUNDARY. Normally malloc
4792 will ensure this anyhow. */
4793 tmp = round_trampoline_addr (r_tramp);
4794 if (tmp != r_tramp)
4796 m_tramp = change_address (m_tramp, BLKmode, tmp);
4797 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4798 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4801 /* The FUNC argument should be the address of the nested function.
4802 Extract the actual function decl to pass to the hook. */
4803 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4804 t_func = TREE_OPERAND (t_func, 0);
4805 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4807 r_chain = expand_normal (t_chain);
4809 /* Generate insns to initialize the trampoline. */
4810 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4812 if (onstack)
4814 trampolines_created = 1;
4816 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4817 "trampoline generated for nested function %qD", t_func);
4820 return const0_rtx;
4823 static rtx
4824 expand_builtin_adjust_trampoline (tree exp)
4826 rtx tramp;
4828 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4829 return NULL_RTX;
4831 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4832 tramp = round_trampoline_addr (tramp);
4833 if (targetm.calls.trampoline_adjust_address)
4834 tramp = targetm.calls.trampoline_adjust_address (tramp);
4836 return tramp;
4839 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4840 function. The function first checks whether the back end provides
4841 an insn to implement signbit for the respective mode. If not, it
4842 checks whether the floating point format of the value is such that
4843 the sign bit can be extracted. If that is not the case, the
4844 function returns NULL_RTX to indicate that a normal call should be
4845 emitted rather than expanding the function in-line. EXP is the
4846 expression that is a call to the builtin function; if convenient,
4847 the result should be placed in TARGET. */
4848 static rtx
4849 expand_builtin_signbit (tree exp, rtx target)
4851 const struct real_format *fmt;
4852 machine_mode fmode, imode, rmode;
4853 tree arg;
4854 int word, bitpos;
4855 enum insn_code icode;
4856 rtx temp;
4857 location_t loc = EXPR_LOCATION (exp);
4859 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4860 return NULL_RTX;
4862 arg = CALL_EXPR_ARG (exp, 0);
4863 fmode = TYPE_MODE (TREE_TYPE (arg));
4864 rmode = TYPE_MODE (TREE_TYPE (exp));
4865 fmt = REAL_MODE_FORMAT (fmode);
4867 arg = builtin_save_expr (arg);
4869 /* Expand the argument yielding a RTX expression. */
4870 temp = expand_normal (arg);
4872 /* Check if the back end provides an insn that handles signbit for the
4873 argument's mode. */
4874 icode = optab_handler (signbit_optab, fmode);
4875 if (icode != CODE_FOR_nothing)
4877 rtx_insn *last = get_last_insn ();
4878 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4879 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4880 return target;
4881 delete_insns_since (last);
4884 /* For floating point formats without a sign bit, implement signbit
4885 as "ARG < 0.0". */
4886 bitpos = fmt->signbit_ro;
4887 if (bitpos < 0)
4889 /* But we can't do this if the format supports signed zero. */
4890 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4891 return NULL_RTX;
4893 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4894 build_real (TREE_TYPE (arg), dconst0));
4895 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4898 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4900 imode = int_mode_for_mode (fmode);
4901 if (imode == BLKmode)
4902 return NULL_RTX;
4903 temp = gen_lowpart (imode, temp);
4905 else
4907 imode = word_mode;
4908 /* Handle targets with different FP word orders. */
4909 if (FLOAT_WORDS_BIG_ENDIAN)
4910 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4911 else
4912 word = bitpos / BITS_PER_WORD;
4913 temp = operand_subword_force (temp, word, fmode);
4914 bitpos = bitpos % BITS_PER_WORD;
4917 /* Force the intermediate word_mode (or narrower) result into a
4918 register. This avoids attempting to create paradoxical SUBREGs
4919 of floating point modes below. */
4920 temp = force_reg (imode, temp);
4922 /* If the bitpos is within the "result mode" lowpart, the operation
4923 can be implemented with a single bitwise AND.  Otherwise, we need
4924 a right shift and an AND. */
4926 if (bitpos < GET_MODE_BITSIZE (rmode))
4928 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4930 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4931 temp = gen_lowpart (rmode, temp);
4932 temp = expand_binop (rmode, and_optab, temp,
4933 immed_wide_int_const (mask, rmode),
4934 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4936 else
4938 /* Perform a logical right shift to place the signbit in the least
4939 significant bit, then truncate the result to the desired mode
4940 and mask just this bit. */
4941 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4942 temp = gen_lowpart (rmode, temp);
4943 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4944 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4947 return temp;
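/* Editorial sketch (not part of GCC): the fallback above extracts the
   sign bit directly from the representation.  For IEEE single precision
   (bitpos == 31) it is the moral equivalent of:

     #include <stdint.h>
     #include <string.h>
     int my_signbit (float x)
     {
       uint32_t bits;
       memcpy (&bits, &x, sizeof bits);   // reinterpret, don't convert
       return (bits >> 31) & 1;           // the shift-and-mask path
     }

   When the bit already lies within the result mode's lowpart, the shift
   is skipped and a single AND with a one-bit mask suffices.  */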
4950 /* Expand fork or exec calls.  TARGET is the desired target of the
4951 call.  EXP is the call.  FN is the identifier of the actual
4952 function.  IGNORE is nonzero if the value is to be ignored.  */
4955 static rtx
4956 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4958 tree id, decl;
4959 tree call;
4961 /* If we are not profiling, just call the function. */
4962 if (!profile_arc_flag)
4963 return NULL_RTX;
4965 /* Otherwise call the wrapper. This should be equivalent for the rest of
4966 the compiler, so the code does not diverge, and the wrapper may run the
4967 code necessary for keeping the profiling sane. */
4969 switch (DECL_FUNCTION_CODE (fn))
4971 case BUILT_IN_FORK:
4972 id = get_identifier ("__gcov_fork");
4973 break;
4975 case BUILT_IN_EXECL:
4976 id = get_identifier ("__gcov_execl");
4977 break;
4979 case BUILT_IN_EXECV:
4980 id = get_identifier ("__gcov_execv");
4981 break;
4983 case BUILT_IN_EXECLP:
4984 id = get_identifier ("__gcov_execlp");
4985 break;
4987 case BUILT_IN_EXECLE:
4988 id = get_identifier ("__gcov_execle");
4989 break;
4991 case BUILT_IN_EXECVP:
4992 id = get_identifier ("__gcov_execvp");
4993 break;
4995 case BUILT_IN_EXECVE:
4996 id = get_identifier ("__gcov_execve");
4997 break;
4999 default:
5000 gcc_unreachable ();
5003 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5004 FUNCTION_DECL, id, TREE_TYPE (fn));
5005 DECL_EXTERNAL (decl) = 1;
5006 TREE_PUBLIC (decl) = 1;
5007 DECL_ARTIFICIAL (decl) = 1;
5008 TREE_NOTHROW (decl) = 1;
5009 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5010 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5011 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5012 return expand_call (call, target, ignore);
5017 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5018 the pointer in these functions is void*, the tree optimizers may remove
5019 casts. The mode computed in expand_builtin isn't reliable either, due
5020 to __sync_bool_compare_and_swap.
5022 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5023 group of builtins. This gives us log2 of the mode size. */
5025 static inline machine_mode
5026 get_builtin_sync_mode (int fcode_diff)
5028 /* The size is not negotiable, so ask not to get BLKmode in return
5029 if the target indicates that a smaller size would be better. */
5030 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
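/* Editorial sketch (not part of GCC): FCODE_DIFF encodes the access size
   because the _1/_2/_4/_8/_16 variants are consecutive enumerators.  E.g.

     BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2

   so the mode requested is BITS_PER_UNIT << 2 == 32 bits, i.e. SImode
   on a typical 8-bit-byte target.  */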
5033 /* Expand the memory expression LOC and return the appropriate memory operand
5034 for the builtin_sync operations. */
5036 static rtx
5037 get_builtin_sync_mem (tree loc, machine_mode mode)
5039 rtx addr, mem;
5041 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5042 addr = convert_memory_address (Pmode, addr);
5044 /* Note that we explicitly do not want any alias information for this
5045 memory, so that we kill all other live memories. Otherwise we don't
5046 satisfy the full barrier semantics of the intrinsic. */
5047 mem = validize_mem (gen_rtx_MEM (mode, addr));
5049 /* The alignment needs to be at least that of the mode.  */
5050 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5051 get_pointer_alignment (loc)));
5052 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5053 MEM_VOLATILE_P (mem) = 1;
5055 return mem;
5058 /* Make sure an argument is in the right mode.
5059 EXP is the tree argument.
5060 MODE is the mode it should be in. */
5062 static rtx
5063 expand_expr_force_mode (tree exp, machine_mode mode)
5065 rtx val;
5066 machine_mode old_mode;
5068 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5069 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5070 of CONST_INTs, where we know the old_mode only from the call argument. */
5072 old_mode = GET_MODE (val);
5073 if (old_mode == VOIDmode)
5074 old_mode = TYPE_MODE (TREE_TYPE (exp));
5075 val = convert_modes (mode, old_mode, val, 1);
5076 return val;
5080 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5081 EXP is the CALL_EXPR. CODE is the rtx code
5082 that corresponds to the arithmetic or logical operation from the name;
5083 an exception here is that NOT actually means NAND. TARGET is an optional
5084 place for us to store the results; AFTER is true if this is the
5085 xxx_and_fetch form, i.e. the value after the operation is returned.  */
5087 static rtx
5088 expand_builtin_sync_operation (machine_mode mode, tree exp,
5089 enum rtx_code code, bool after,
5090 rtx target)
5092 rtx val, mem;
5093 location_t loc = EXPR_LOCATION (exp);
5095 if (code == NOT && warn_sync_nand)
5097 tree fndecl = get_callee_fndecl (exp);
5098 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5100 static bool warned_f_a_n, warned_n_a_f;
5102 switch (fcode)
5104 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5105 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5106 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5107 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5108 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5109 if (warned_f_a_n)
5110 break;
5112 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5113 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5114 warned_f_a_n = true;
5115 break;
5117 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5118 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5119 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5120 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5121 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5122 if (warned_n_a_f)
5123 break;
5125 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5126 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5127 warned_n_a_f = true;
5128 break;
5130 default:
5131 gcc_unreachable ();
5135 /* Expand the operands. */
5136 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5137 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5139 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5140 after);
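/* Editorial sketch (not part of GCC): the "NOT actually means NAND"
   convention above, and the GCC 4.4 semantic change the warning is
   about, in source terms:

     // current semantics (GCC >= 4.4):
     //   __sync_fetch_and_nand (p, v): tmp = *p; *p = ~(tmp & v); return tmp;
     //   __sync_nand_and_fetch (p, v): tmp = ~(*p & v); *p = tmp; return tmp;

   Before 4.4 the stored value was ~tmp & v rather than ~(tmp & v),
   which is why a one-time inform () is issued for each form.  */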
5143 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5144 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5145 true if this is the boolean form. TARGET is a place for us to store the
5146 results; this is NOT optional if IS_BOOL is true. */
5148 static rtx
5149 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5150 bool is_bool, rtx target)
5152 rtx old_val, new_val, mem;
5153 rtx *pbool, *poval;
5155 /* Expand the operands. */
5156 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5157 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5158 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5160 pbool = poval = NULL;
5161 if (target != const0_rtx)
5163 if (is_bool)
5164 pbool = &target;
5165 else
5166 poval = &target;
5168 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5169 false, MEMMODEL_SEQ_CST,
5170 MEMMODEL_SEQ_CST))
5171 return NULL_RTX;
5173 return target;
5176 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5177 general form is actually an atomic exchange, and some targets only
5178 support a reduced form with the second argument being a constant 1.
5179 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5180 the results. */
5182 static rtx
5183 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5184 rtx target)
5186 rtx val, mem;
5188 /* Expand the operands. */
5189 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5190 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5192 return expand_sync_lock_test_and_set (target, mem, val);
5195 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5197 static void
5198 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5200 rtx mem;
5202 /* Expand the operands. */
5203 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5205 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5208 /* Given an integer representing an ``enum memmodel'', verify its
5209 correctness and return the memory model enum. */
5211 static enum memmodel
5212 get_memmodel (tree exp)
5214 rtx op;
5215 unsigned HOST_WIDE_INT val;
5217 /* If the parameter is not a constant, it's a run time value so we'll just
5218 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5219 if (TREE_CODE (exp) != INTEGER_CST)
5220 return MEMMODEL_SEQ_CST;
5222 op = expand_normal (exp);
5224 val = INTVAL (op);
5225 if (targetm.memmodel_check)
5226 val = targetm.memmodel_check (val);
5227 else if (val & ~MEMMODEL_MASK)
5229 warning (OPT_Winvalid_memory_model,
5230 "Unknown architecture specifier in memory model to builtin.");
5231 return MEMMODEL_SEQ_CST;
5234 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5236 warning (OPT_Winvalid_memory_model,
5237 "invalid memory model argument to builtin");
5238 return MEMMODEL_SEQ_CST;
5241 return (enum memmodel) val;
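/* Editorial sketch (not part of GCC): the INTEGER_CST values checked
   above are the __ATOMIC_* constants of the atomic builtins API:

     __ATOMIC_RELAXED == 0    __ATOMIC_RELEASE == 3
     __ATOMIC_CONSUME == 1    __ATOMIC_ACQ_REL == 4
     __ATOMIC_ACQUIRE == 2    __ATOMIC_SEQ_CST == 5

   Bits above MEMMODEL_MASK are left to target-specific extensions,
   which is what the targetm.memmodel_check hook vets.  */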
5244 /* Expand the __atomic_exchange intrinsic:
5245 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5246 EXP is the CALL_EXPR.
5247 TARGET is an optional place for us to store the results. */
5249 static rtx
5250 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5252 rtx val, mem;
5253 enum memmodel model;
5255 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5256 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5258 error ("invalid memory model for %<__atomic_exchange%>");
5259 return NULL_RTX;
5262 if (!flag_inline_atomics)
5263 return NULL_RTX;
5265 /* Expand the operands. */
5266 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5267 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5269 return expand_atomic_exchange (target, mem, val, model);
5272 /* Expand the __atomic_compare_exchange intrinsic:
5273 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5274 TYPE desired, BOOL weak,
5275 enum memmodel success,
5276 enum memmodel failure)
5277 EXP is the CALL_EXPR.
5278 TARGET is an optional place for us to store the results. */
5280 static rtx
5281 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5282 rtx target)
5284 rtx expect, desired, mem, oldval;
5285 rtx_code_label *label;
5286 enum memmodel success, failure;
5287 tree weak;
5288 bool is_weak;
5290 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5291 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5293 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5294 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5296 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5297 return NULL_RTX;
5300 if (failure > success)
5302 error ("failure memory model cannot be stronger than success "
5303 "memory model for %<__atomic_compare_exchange%>");
5304 return NULL_RTX;
5307 if (!flag_inline_atomics)
5308 return NULL_RTX;
5310 /* Expand the operands. */
5311 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5313 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5314 expect = convert_memory_address (Pmode, expect);
5315 expect = gen_rtx_MEM (mode, expect);
5316 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5318 weak = CALL_EXPR_ARG (exp, 3);
5319 is_weak = false;
5320 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5321 is_weak = true;
5323 if (target == const0_rtx)
5324 target = NULL;
5326 /* Lest the rtl backend create a race condition with an improper store
5327 to memory, always create a new pseudo for OLDVAL. */
5328 oldval = NULL;
5330 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5331 is_weak, success, failure))
5332 return NULL_RTX;
5334 /* Conditionally store back to EXPECT, lest we create a race condition
5335 with an improper store to memory. */
5336 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5337 the normal case where EXPECT is totally private, i.e. a register. At
5338 which point the store can be unconditional. */
5339 label = gen_label_rtx ();
5340 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5341 emit_move_insn (expect, oldval);
5342 emit_label (label);
5344 return target;
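/* A user-level sketch of the corresponding builtin (`obj' is a
   placeholder); on failure the value read is written back through the
   EXPECTED pointer, which is the conditional store emitted above:

     int expected = 0;
     bool ok = __atomic_compare_exchange_n (&obj, &expected, 1,
                                            false, __ATOMIC_ACQ_REL,
                                            __ATOMIC_ACQUIRE);

   Passing true for the fourth argument selects the weak form, which
   may fail spuriously but can be cheaper inside a retry loop.  */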
5347 /* Expand the __atomic_load intrinsic:
5348 TYPE __atomic_load (TYPE *object, enum memmodel)
5349 EXP is the CALL_EXPR.
5350 TARGET is an optional place for us to store the results. */
5352 static rtx
5353 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5355 rtx mem;
5356 enum memmodel model;
5358 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5359 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5360 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5362 error ("invalid memory model for %<__atomic_load%>");
5363 return NULL_RTX;
5366 if (!flag_inline_atomics)
5367 return NULL_RTX;
5369 /* Expand the operand. */
5370 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5372 return expand_atomic_load (target, mem, model);
5376 /* Expand the __atomic_store intrinsic:
5377 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5378 EXP is the CALL_EXPR.
5379 TARGET is an optional place for us to store the results. */
5381 static rtx
5382 expand_builtin_atomic_store (machine_mode mode, tree exp)
5384 rtx mem, val;
5385 enum memmodel model;
5387 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5388 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5389 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5390 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5392 error ("invalid memory model for %<__atomic_store%>");
5393 return NULL_RTX;
5396 if (!flag_inline_atomics)
5397 return NULL_RTX;
5399 /* Expand the operands. */
5400 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5401 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5403 return expand_atomic_store (mem, val, model, false);
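/* User-level sketches for the two expanders above (`x' is a placeholder):

     int v = __atomic_load_n (&x, __ATOMIC_ACQUIRE);
     __atomic_store_n (&x, v + 1, __ATOMIC_RELEASE);

   A load may not use RELEASE or ACQ_REL, and a store may only use
   RELAXED, RELEASE or SEQ_CST, matching the model checks above.  */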
5406 /* Expand the __atomic_fetch_XXX intrinsic:
5407 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5408 EXP is the CALL_EXPR.
5409 TARGET is an optional place for us to store the results.
5410 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for nand).
5411 FETCH_AFTER is true if returning the result of the operation.
5412 FETCH_AFTER is false if returning the value before the operation.
5413 IGNORE is true if the result is not used.
5414 EXT_CALL is the correct builtin for an external call if this cannot be
5415 resolved to an instruction sequence. */
5417 static rtx
5418 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5419 enum rtx_code code, bool fetch_after,
5420 bool ignore, enum built_in_function ext_call)
5422 rtx val, mem, ret;
5423 enum memmodel model;
5424 tree fndecl;
5425 tree addr;
5427 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5429 /* Expand the operands. */
5430 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5431 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5433 /* Only try generating instructions if inlining is turned on. */
5434 if (flag_inline_atomics)
5436 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5437 if (ret)
5438 return ret;
5441 /* If there is no library-call fallback for this builtin, give up. */
5442 if (ext_call == BUILT_IN_NONE)
5443 return NULL_RTX;
5445 /* Change the call to the specified function. */
5446 fndecl = get_callee_fndecl (exp);
5447 addr = CALL_EXPR_FN (exp);
5448 STRIP_NOPS (addr);
5450 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5451 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5453 /* Expand the call here so we can emit trailing code. */
5454 ret = expand_call (exp, target, ignore);
5456 /* Replace the original function just in case it matters. */
5457 TREE_OPERAND (addr, 0) = fndecl;
5459 /* Then issue the arithmetic correction to return the right result. */
5460 if (!ignore)
5462 if (code == NOT)
5464 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5465 OPTAB_LIB_WIDEN);
5466 ret = expand_simple_unop (mode, NOT, ret, target, true);
5468 else
5469 ret = expand_simple_binop (mode, code, ret, val, target, true,
5470 OPTAB_LIB_WIDEN);
5472 return ret;
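/* The correction above reconstructs an OP_fetch result from a fetch_OP
   library call.  For the NAND case, for instance, the identity used is
   (sketch; `p', `v' and `m' are placeholders):

     __atomic_nand_fetch (p, v, m) == ~(__atomic_fetch_nand (p, v, m) & v)

   i.e. AND the returned old value with VAL and invert, which is exactly
   the NOT branch above.  */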
5476 #ifndef HAVE_atomic_clear
5477 # define HAVE_atomic_clear 0
5478 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5479 #endif
5481 /* Expand an atomic clear operation.
5482 void __atomic_clear (BOOL *obj, enum memmodel)
5483 EXP is the call expression. */
5485 static rtx
5486 expand_builtin_atomic_clear (tree exp)
5488 machine_mode mode;
5489 rtx mem, ret;
5490 enum memmodel model;
5492 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5493 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5494 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5496 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5497 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5499 error ("invalid memory model for %<__atomic_store%>");
5500 return const0_rtx;
5503 if (HAVE_atomic_clear)
5505 emit_insn (gen_atomic_clear (mem, model));
5506 return const0_rtx;
5509 /* Try issuing an __atomic_store, allowing fallback to a __sync_lock_release
5510 libcall; failing that, a plain store is issued below. The only way this can
5511 fail is if the bool type is larger than a word size. Unlikely, but
5512 handle it anyway for completeness. Assume a single threaded model since
5513 there is no atomic support in this case, and no barriers are required. */
5514 ret = expand_atomic_store (mem, const0_rtx, model, true);
5515 if (!ret)
5516 emit_move_insn (mem, const0_rtx);
5517 return const0_rtx;
5520 /* Expand an atomic test_and_set operation.
5521 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5522 EXP is the call expression. */
5524 static rtx
5525 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5527 rtx mem;
5528 enum memmodel model;
5529 machine_mode mode;
5531 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5532 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5533 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5535 return expand_atomic_test_and_set (target, mem, model);
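/* Together with __atomic_clear this implements the C11 atomic_flag
   protocol; a sketch (`guard' is a placeholder):

     static _Bool guard;
     while (__atomic_test_and_set (&guard, __ATOMIC_ACQUIRE))
       ;
     ... critical section ...
     __atomic_clear (&guard, __ATOMIC_RELEASE);  */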
5539 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5540 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5542 static tree
5543 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5545 int size;
5546 machine_mode mode;
5547 unsigned int mode_align, type_align;
5549 if (TREE_CODE (arg0) != INTEGER_CST)
5550 return NULL_TREE;
5552 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5553 mode = mode_for_size (size, MODE_INT, 0);
5554 mode_align = GET_MODE_ALIGNMENT (mode);
5556 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5557 type_align = mode_align;
5558 else
5560 tree ttype = TREE_TYPE (arg1);
5562 /* This function is usually invoked and folded immediately by the front
5563 end before anything else has a chance to look at it. The pointer
5564 parameter at this point is usually cast to a void *, so check for that
5565 and look past the cast. */
5566 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5567 && VOID_TYPE_P (TREE_TYPE (ttype)))
5568 arg1 = TREE_OPERAND (arg1, 0);
5570 ttype = TREE_TYPE (arg1);
5571 gcc_assert (POINTER_TYPE_P (ttype));
5573 /* Get the underlying type of the object. */
5574 ttype = TREE_TYPE (ttype);
5575 type_align = TYPE_ALIGN (ttype);
5578 /* If the object has smaller alignment, the lock free routines cannot
5579 be used. */
5580 if (type_align < mode_align)
5581 return boolean_false_node;
5583 /* Check if a compare_and_swap pattern exists for the mode which represents
5584 the required size. The pattern is not allowed to fail, so the existence
5585 of the pattern indicates support is present. */
5586 if (can_compare_and_swap_p (mode, true))
5587 return boolean_true_node;
5588 else
5589 return boolean_false_node;
5592 /* Return true if the parameters to call EXP represent an object which will
5593 always generate lock free instructions. The first argument represents the
5594 size of the object, and the second parameter is a pointer to the object
5595 itself. If NULL is passed for the object, then the result is based on
5596 typical alignment for an object of the specified size. Otherwise return
5597 false. */
5599 static rtx
5600 expand_builtin_atomic_always_lock_free (tree exp)
5602 tree size;
5603 tree arg0 = CALL_EXPR_ARG (exp, 0);
5604 tree arg1 = CALL_EXPR_ARG (exp, 1);
5606 if (TREE_CODE (arg0) != INTEGER_CST)
5608 error ("non-constant argument 1 to __atomic_always_lock_free");
5609 return const0_rtx;
5612 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5613 if (size == boolean_true_node)
5614 return const1_rtx;
5615 return const0_rtx;
5618 /* Return one or zero if it can be determined that object ARG1 of size ARG0
5619 is lock free on this architecture. */
5621 static tree
5622 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5624 if (!flag_inline_atomics)
5625 return NULL_TREE;
5627 /* If it isn't always lock free, don't generate a result. */
5628 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5629 return boolean_true_node;
5631 return NULL_TREE;
5634 /* Return true if the parameters to call EXP represent an object which will
5635 always generate lock free instructions. The first argument represents the
5636 size of the object, and the second parameter is a pointer to the object
5637 itself. If NULL is passed for the object, then the result is based on
5638 typical alignment for an object of the specified size. Otherwise return
5639 NULL. */
5641 static rtx
5642 expand_builtin_atomic_is_lock_free (tree exp)
5644 tree size;
5645 tree arg0 = CALL_EXPR_ARG (exp, 0);
5646 tree arg1 = CALL_EXPR_ARG (exp, 1);
5648 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5650 error ("non-integer argument 1 to __atomic_is_lock_free");
5651 return NULL_RTX;
5654 if (!flag_inline_atomics)
5655 return NULL_RTX;
5657 /* If the value is known at compile time, return the RTX for it. */
5658 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5659 if (size == boolean_true_node)
5660 return const1_rtx;
5662 return NULL_RTX;
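/* User-level sketches: the first folds to a compile-time constant; the
   second may become a libatomic call when the answer depends on the
   runtime object (`p' is a placeholder):

     _Static_assert (__atomic_always_lock_free (sizeof (int), 0), "lf");
     if (__atomic_is_lock_free (sizeof (long long), p))
       ... take the inline path ...  */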
5665 /* Expand the __atomic_thread_fence intrinsic:
5666 void __atomic_thread_fence (enum memmodel)
5667 EXP is the CALL_EXPR. */
5669 static void
5670 expand_builtin_atomic_thread_fence (tree exp)
5672 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5673 expand_mem_thread_fence (model);
5676 /* Expand the __atomic_signal_fence intrinsic:
5677 void __atomic_signal_fence (enum memmodel)
5678 EXP is the CALL_EXPR. */
5680 static void
5681 expand_builtin_atomic_signal_fence (tree exp)
5683 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5684 expand_mem_signal_fence (model);
5687 /* Expand the __sync_synchronize intrinsic. */
5689 static void
5690 expand_builtin_sync_synchronize (void)
5692 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
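/* User-level sketches for the three fences above:

     __atomic_thread_fence (__ATOMIC_ACQ_REL);   -- inter-thread fence
     __atomic_signal_fence (__ATOMIC_SEQ_CST);   -- compiler-only fence,
                                                    for same-thread
                                                    signal handlers
     __sync_synchronize ();                      -- full SEQ_CST barrier  */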
5695 static rtx
5696 expand_builtin_thread_pointer (tree exp, rtx target)
5698 enum insn_code icode;
5699 if (!validate_arglist (exp, VOID_TYPE))
5700 return const0_rtx;
5701 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5702 if (icode != CODE_FOR_nothing)
5704 struct expand_operand op;
5705 /* If the target is not suitable then create a new target. */
5706 if (target == NULL_RTX
5707 || !REG_P (target)
5708 || GET_MODE (target) != Pmode)
5709 target = gen_reg_rtx (Pmode);
5710 create_output_operand (&op, target, Pmode);
5711 expand_insn (icode, 1, &op);
5712 return target;
5714 error ("__builtin_thread_pointer is not supported on this target");
5715 return const0_rtx;
5718 static void
5719 expand_builtin_set_thread_pointer (tree exp)
5721 enum insn_code icode;
5722 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5723 return;
5724 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5725 if (icode != CODE_FOR_nothing)
5727 struct expand_operand op;
5728 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5729 Pmode, EXPAND_NORMAL);
5730 create_input_operand (&op, val, Pmode);
5731 expand_insn (icode, 1, &op);
5732 return;
5734 error ("__builtin_set_thread_pointer is not supported on this target");
5738 /* Emit code to restore the current value of the stack. */
5740 static void
5741 expand_stack_restore (tree var)
5743 rtx_insn *prev;
5744 rtx sa = expand_normal (var);
5746 sa = convert_memory_address (Pmode, sa);
5748 prev = get_last_insn ();
5749 emit_stack_restore (SAVE_BLOCK, sa);
5750 fixup_args_size_notes (prev, get_last_insn (), 0);
5754 /* Emit code to save the current value of the stack. */
5756 static rtx
5757 expand_stack_save (void)
5759 rtx ret = NULL_RTX;
5761 do_pending_stack_adjust ();
5762 emit_stack_save (SAVE_BLOCK, &ret);
5763 return ret;
5766 /* Expand an expression EXP that calls a built-in function,
5767 with result going to TARGET if that's convenient
5768 (and in mode MODE if that's convenient).
5769 SUBTARGET may be used as the target for computing one of EXP's operands.
5770 IGNORE is nonzero if the value is to be ignored. */
5772 rtx
5773 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5774 int ignore)
5776 tree fndecl = get_callee_fndecl (exp);
5777 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5778 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5779 int flags;
5781 /* When ASan is enabled, we don't want to expand some memory/string
5782 builtins and rely on libsanitizer's hooks. This allows us to avoid
5783 redundant checks and be sure that possible overflow will be detected
5784 by ASan. */
5786 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5787 return expand_call (exp, target, ignore);
5789 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5790 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5792 /* When not optimizing, generate calls to library functions for a certain
5793 set of builtins. */
5794 if (!optimize
5795 && !called_as_built_in (fndecl)
5796 && fcode != BUILT_IN_FORK
5797 && fcode != BUILT_IN_EXECL
5798 && fcode != BUILT_IN_EXECV
5799 && fcode != BUILT_IN_EXECLP
5800 && fcode != BUILT_IN_EXECLE
5801 && fcode != BUILT_IN_EXECVP
5802 && fcode != BUILT_IN_EXECVE
5803 && fcode != BUILT_IN_ALLOCA
5804 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5805 && fcode != BUILT_IN_FREE
5806 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5807 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5808 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5809 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5810 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5811 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5812 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5813 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5814 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5815 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5816 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5817 && fcode != BUILT_IN_CHKP_BNDRET)
5818 return expand_call (exp, target, ignore);
5820 /* The built-in function expanders test for target == const0_rtx
5821 to determine whether the function's result will be ignored. */
5822 if (ignore)
5823 target = const0_rtx;
5825 /* If the result of a pure or const built-in function is ignored, and
5826 none of its arguments are volatile, we can avoid expanding the
5827 built-in call and just evaluate the arguments for side-effects. */
5828 if (target == const0_rtx
5829 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5830 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5832 bool volatilep = false;
5833 tree arg;
5834 call_expr_arg_iterator iter;
5836 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5837 if (TREE_THIS_VOLATILE (arg))
5839 volatilep = true;
5840 break;
5843 if (! volatilep)
5845 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5846 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5847 return const0_rtx;
5851 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5853 switch (fcode)
5855 CASE_FLT_FN (BUILT_IN_FABS):
5856 case BUILT_IN_FABSD32:
5857 case BUILT_IN_FABSD64:
5858 case BUILT_IN_FABSD128:
5859 target = expand_builtin_fabs (exp, target, subtarget);
5860 if (target)
5861 return target;
5862 break;
5864 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5865 target = expand_builtin_copysign (exp, target, subtarget);
5866 if (target)
5867 return target;
5868 break;
5870 /* Just do a normal library call if we were unable to fold
5871 the values. */
5872 CASE_FLT_FN (BUILT_IN_CABS):
5873 break;
5875 CASE_FLT_FN (BUILT_IN_EXP):
5876 CASE_FLT_FN (BUILT_IN_EXP10):
5877 CASE_FLT_FN (BUILT_IN_POW10):
5878 CASE_FLT_FN (BUILT_IN_EXP2):
5879 CASE_FLT_FN (BUILT_IN_EXPM1):
5880 CASE_FLT_FN (BUILT_IN_LOGB):
5881 CASE_FLT_FN (BUILT_IN_LOG):
5882 CASE_FLT_FN (BUILT_IN_LOG10):
5883 CASE_FLT_FN (BUILT_IN_LOG2):
5884 CASE_FLT_FN (BUILT_IN_LOG1P):
5885 CASE_FLT_FN (BUILT_IN_TAN):
5886 CASE_FLT_FN (BUILT_IN_ASIN):
5887 CASE_FLT_FN (BUILT_IN_ACOS):
5888 CASE_FLT_FN (BUILT_IN_ATAN):
5889 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5890 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5891 because of possible accuracy problems. */
5892 if (! flag_unsafe_math_optimizations)
5893 break;
5894 CASE_FLT_FN (BUILT_IN_SQRT):
5895 CASE_FLT_FN (BUILT_IN_FLOOR):
5896 CASE_FLT_FN (BUILT_IN_CEIL):
5897 CASE_FLT_FN (BUILT_IN_TRUNC):
5898 CASE_FLT_FN (BUILT_IN_ROUND):
5899 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5900 CASE_FLT_FN (BUILT_IN_RINT):
5901 target = expand_builtin_mathfn (exp, target, subtarget);
5902 if (target)
5903 return target;
5904 break;
5906 CASE_FLT_FN (BUILT_IN_FMA):
5907 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5908 if (target)
5909 return target;
5910 break;
5912 CASE_FLT_FN (BUILT_IN_ILOGB):
5913 if (! flag_unsafe_math_optimizations)
5914 break;
5915 CASE_FLT_FN (BUILT_IN_ISINF):
5916 CASE_FLT_FN (BUILT_IN_FINITE):
5917 case BUILT_IN_ISFINITE:
5918 case BUILT_IN_ISNORMAL:
5919 target = expand_builtin_interclass_mathfn (exp, target);
5920 if (target)
5921 return target;
5922 break;
5924 CASE_FLT_FN (BUILT_IN_ICEIL):
5925 CASE_FLT_FN (BUILT_IN_LCEIL):
5926 CASE_FLT_FN (BUILT_IN_LLCEIL):
5927 CASE_FLT_FN (BUILT_IN_LFLOOR):
5928 CASE_FLT_FN (BUILT_IN_IFLOOR):
5929 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5930 target = expand_builtin_int_roundingfn (exp, target);
5931 if (target)
5932 return target;
5933 break;
5935 CASE_FLT_FN (BUILT_IN_IRINT):
5936 CASE_FLT_FN (BUILT_IN_LRINT):
5937 CASE_FLT_FN (BUILT_IN_LLRINT):
5938 CASE_FLT_FN (BUILT_IN_IROUND):
5939 CASE_FLT_FN (BUILT_IN_LROUND):
5940 CASE_FLT_FN (BUILT_IN_LLROUND):
5941 target = expand_builtin_int_roundingfn_2 (exp, target);
5942 if (target)
5943 return target;
5944 break;
5946 CASE_FLT_FN (BUILT_IN_POWI):
5947 target = expand_builtin_powi (exp, target);
5948 if (target)
5949 return target;
5950 break;
5952 CASE_FLT_FN (BUILT_IN_ATAN2):
5953 CASE_FLT_FN (BUILT_IN_LDEXP):
5954 CASE_FLT_FN (BUILT_IN_SCALB):
5955 CASE_FLT_FN (BUILT_IN_SCALBN):
5956 CASE_FLT_FN (BUILT_IN_SCALBLN):
5957 if (! flag_unsafe_math_optimizations)
5958 break;
5960 CASE_FLT_FN (BUILT_IN_FMOD):
5961 CASE_FLT_FN (BUILT_IN_REMAINDER):
5962 CASE_FLT_FN (BUILT_IN_DREM):
5963 CASE_FLT_FN (BUILT_IN_POW):
5964 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5965 if (target)
5966 return target;
5967 break;
5969 CASE_FLT_FN (BUILT_IN_CEXPI):
5970 target = expand_builtin_cexpi (exp, target);
5971 gcc_assert (target);
5972 return target;
5974 CASE_FLT_FN (BUILT_IN_SIN):
5975 CASE_FLT_FN (BUILT_IN_COS):
5976 if (! flag_unsafe_math_optimizations)
5977 break;
5978 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5979 if (target)
5980 return target;
5981 break;
5983 CASE_FLT_FN (BUILT_IN_SINCOS):
5984 if (! flag_unsafe_math_optimizations)
5985 break;
5986 target = expand_builtin_sincos (exp);
5987 if (target)
5988 return target;
5989 break;
5991 case BUILT_IN_APPLY_ARGS:
5992 return expand_builtin_apply_args ();
5994 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5995 FUNCTION with a copy of the parameters described by
5996 ARGUMENTS, and ARGSIZE. It returns a block of memory
5997 allocated on the stack into which is stored all the registers
5998 that might possibly be used for returning the result of a
5999 function. ARGUMENTS is the value returned by
6000 __builtin_apply_args. ARGSIZE is the number of bytes of
6001 arguments that must be copied. ??? How should this value be
6002 computed? We'll also need a safe worst case value for varargs
6003 functions. */
6004 case BUILT_IN_APPLY:
6005 if (!validate_arglist (exp, POINTER_TYPE,
6006 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6007 && !validate_arglist (exp, REFERENCE_TYPE,
6008 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6009 return const0_rtx;
6010 else
6012 rtx ops[3];
6014 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6015 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6016 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6018 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6021 /* __builtin_return (RESULT) causes the function to return the
6022 value described by RESULT. RESULT is address of the block of
6023 memory returned by __builtin_apply. */
6024 case BUILT_IN_RETURN:
6025 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6026 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6027 return const0_rtx;
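/* A forwarding-thunk sketch of the three builtins (a GCC extension);
   the 64 below is a caller-chosen worst-case argument size, as the ???
   note above laments (`target_fn' is a placeholder):

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (ret);  */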
6029 case BUILT_IN_SAVEREGS:
6030 return expand_builtin_saveregs ();
6032 case BUILT_IN_VA_ARG_PACK:
6033 /* All valid uses of __builtin_va_arg_pack () are removed during
6034 inlining. */
6035 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6036 return const0_rtx;
6038 case BUILT_IN_VA_ARG_PACK_LEN:
6039 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6040 inlining. */
6041 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6042 return const0_rtx;
6044 /* Return the address of the first anonymous stack arg. */
6045 case BUILT_IN_NEXT_ARG:
6046 if (fold_builtin_next_arg (exp, false))
6047 return const0_rtx;
6048 return expand_builtin_next_arg ();
6050 case BUILT_IN_CLEAR_CACHE:
6051 target = expand_builtin___clear_cache (exp);
6052 if (target)
6053 return target;
6054 break;
6056 case BUILT_IN_CLASSIFY_TYPE:
6057 return expand_builtin_classify_type (exp);
6059 case BUILT_IN_CONSTANT_P:
6060 return const0_rtx;
6062 case BUILT_IN_FRAME_ADDRESS:
6063 case BUILT_IN_RETURN_ADDRESS:
6064 return expand_builtin_frame_address (fndecl, exp);
6066 /* Returns the address of the area where the structure is returned.
6067 0 otherwise. */
6068 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6069 if (call_expr_nargs (exp) != 0
6070 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6071 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6072 return const0_rtx;
6073 else
6074 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6076 case BUILT_IN_ALLOCA:
6077 case BUILT_IN_ALLOCA_WITH_ALIGN:
6078 /* If the allocation stems from the declaration of a variable-sized
6079 object, it cannot accumulate. */
6080 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6081 if (target)
6082 return target;
6083 break;
6085 case BUILT_IN_STACK_SAVE:
6086 return expand_stack_save ();
6088 case BUILT_IN_STACK_RESTORE:
6089 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6090 return const0_rtx;
6092 case BUILT_IN_BSWAP16:
6093 case BUILT_IN_BSWAP32:
6094 case BUILT_IN_BSWAP64:
6095 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6096 if (target)
6097 return target;
6098 break;
6100 CASE_INT_FN (BUILT_IN_FFS):
6101 target = expand_builtin_unop (target_mode, exp, target,
6102 subtarget, ffs_optab);
6103 if (target)
6104 return target;
6105 break;
6107 CASE_INT_FN (BUILT_IN_CLZ):
6108 target = expand_builtin_unop (target_mode, exp, target,
6109 subtarget, clz_optab);
6110 if (target)
6111 return target;
6112 break;
6114 CASE_INT_FN (BUILT_IN_CTZ):
6115 target = expand_builtin_unop (target_mode, exp, target,
6116 subtarget, ctz_optab);
6117 if (target)
6118 return target;
6119 break;
6121 CASE_INT_FN (BUILT_IN_CLRSB):
6122 target = expand_builtin_unop (target_mode, exp, target,
6123 subtarget, clrsb_optab);
6124 if (target)
6125 return target;
6126 break;
6128 CASE_INT_FN (BUILT_IN_POPCOUNT):
6129 target = expand_builtin_unop (target_mode, exp, target,
6130 subtarget, popcount_optab);
6131 if (target)
6132 return target;
6133 break;
6135 CASE_INT_FN (BUILT_IN_PARITY):
6136 target = expand_builtin_unop (target_mode, exp, target,
6137 subtarget, parity_optab);
6138 if (target)
6139 return target;
6140 break;
6142 case BUILT_IN_STRLEN:
6143 target = expand_builtin_strlen (exp, target, target_mode);
6144 if (target)
6145 return target;
6146 break;
6148 case BUILT_IN_STRCPY:
6149 target = expand_builtin_strcpy (exp, target);
6150 if (target)
6151 return target;
6152 break;
6154 case BUILT_IN_STRNCPY:
6155 target = expand_builtin_strncpy (exp, target);
6156 if (target)
6157 return target;
6158 break;
6160 case BUILT_IN_STPCPY:
6161 target = expand_builtin_stpcpy (exp, target, mode);
6162 if (target)
6163 return target;
6164 break;
6166 case BUILT_IN_MEMCPY:
6167 target = expand_builtin_memcpy (exp, target);
6168 if (target)
6169 return target;
6170 break;
6172 case BUILT_IN_MEMPCPY:
6173 target = expand_builtin_mempcpy (exp, target, mode);
6174 if (target)
6175 return target;
6176 break;
6178 case BUILT_IN_MEMSET:
6179 target = expand_builtin_memset (exp, target, mode);
6180 if (target)
6181 return target;
6182 break;
6184 case BUILT_IN_BZERO:
6185 target = expand_builtin_bzero (exp);
6186 if (target)
6187 return target;
6188 break;
6190 case BUILT_IN_STRCMP:
6191 target = expand_builtin_strcmp (exp, target);
6192 if (target)
6193 return target;
6194 break;
6196 case BUILT_IN_STRNCMP:
6197 target = expand_builtin_strncmp (exp, target, mode);
6198 if (target)
6199 return target;
6200 break;
6202 case BUILT_IN_BCMP:
6203 case BUILT_IN_MEMCMP:
6204 target = expand_builtin_memcmp (exp, target, mode);
6205 if (target)
6206 return target;
6207 break;
6209 case BUILT_IN_SETJMP:
6210 /* This should have been lowered to the builtins below. */
6211 gcc_unreachable ();
6213 case BUILT_IN_SETJMP_SETUP:
6214 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6215 and the receiver label. */
6216 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6218 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6219 VOIDmode, EXPAND_NORMAL);
6220 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6221 rtx label_r = label_rtx (label);
6223 /* This is copied from the handling of non-local gotos. */
6224 expand_builtin_setjmp_setup (buf_addr, label_r);
6225 nonlocal_goto_handler_labels
6226 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6227 nonlocal_goto_handler_labels);
6228 /* ??? Do not let expand_label treat us as such since we would
6229 not want to be both on the list of non-local labels and on
6230 the list of forced labels. */
6231 FORCED_LABEL (label) = 0;
6232 return const0_rtx;
6234 break;
6236 case BUILT_IN_SETJMP_RECEIVER:
6237 /* __builtin_setjmp_receiver is passed the receiver label. */
6238 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6240 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6241 rtx label_r = label_rtx (label);
6243 expand_builtin_setjmp_receiver (label_r);
6244 return const0_rtx;
6246 break;
6248 /* __builtin_longjmp is passed a pointer to an array of five words.
6249 It's similar to the C library longjmp function but works with
6250 __builtin_setjmp above. */
6251 case BUILT_IN_LONGJMP:
6252 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6254 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6255 VOIDmode, EXPAND_NORMAL);
6256 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6258 if (value != const1_rtx)
6260 error ("%<__builtin_longjmp%> second argument must be 1");
6261 return const0_rtx;
6264 expand_builtin_longjmp (buf_addr, value);
6265 return const0_rtx;
6267 break;
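/* A user-level sketch of the paired builtins; the buffer is the
   five-word array mentioned above (`do_work' and `handle_unwind' are
   placeholders; do_work may call __builtin_longjmp (buf, 1)):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       do_work (buf);
     else
       handle_unwind ();  */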
6269 case BUILT_IN_NONLOCAL_GOTO:
6270 target = expand_builtin_nonlocal_goto (exp);
6271 if (target)
6272 return target;
6273 break;
6275 /* This updates the setjmp buffer that is its argument with the value
6276 of the current stack pointer. */
6277 case BUILT_IN_UPDATE_SETJMP_BUF:
6278 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6280 rtx buf_addr
6281 = expand_normal (CALL_EXPR_ARG (exp, 0));
6283 expand_builtin_update_setjmp_buf (buf_addr);
6284 return const0_rtx;
6286 break;
6288 case BUILT_IN_TRAP:
6289 expand_builtin_trap ();
6290 return const0_rtx;
6292 case BUILT_IN_UNREACHABLE:
6293 expand_builtin_unreachable ();
6294 return const0_rtx;
6296 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6297 case BUILT_IN_SIGNBITD32:
6298 case BUILT_IN_SIGNBITD64:
6299 case BUILT_IN_SIGNBITD128:
6300 target = expand_builtin_signbit (exp, target);
6301 if (target)
6302 return target;
6303 break;
6305 /* Various hooks for the DWARF 2 __throw routine. */
6306 case BUILT_IN_UNWIND_INIT:
6307 expand_builtin_unwind_init ();
6308 return const0_rtx;
6309 case BUILT_IN_DWARF_CFA:
6310 return virtual_cfa_rtx;
6311 #ifdef DWARF2_UNWIND_INFO
6312 case BUILT_IN_DWARF_SP_COLUMN:
6313 return expand_builtin_dwarf_sp_column ();
6314 case BUILT_IN_INIT_DWARF_REG_SIZES:
6315 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6316 return const0_rtx;
6317 #endif
6318 case BUILT_IN_FROB_RETURN_ADDR:
6319 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6320 case BUILT_IN_EXTRACT_RETURN_ADDR:
6321 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6322 case BUILT_IN_EH_RETURN:
6323 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6324 CALL_EXPR_ARG (exp, 1));
6325 return const0_rtx;
6326 #ifdef EH_RETURN_DATA_REGNO
6327 case BUILT_IN_EH_RETURN_DATA_REGNO:
6328 return expand_builtin_eh_return_data_regno (exp);
6329 #endif
6330 case BUILT_IN_EXTEND_POINTER:
6331 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6332 case BUILT_IN_EH_POINTER:
6333 return expand_builtin_eh_pointer (exp);
6334 case BUILT_IN_EH_FILTER:
6335 return expand_builtin_eh_filter (exp);
6336 case BUILT_IN_EH_COPY_VALUES:
6337 return expand_builtin_eh_copy_values (exp);
6339 case BUILT_IN_VA_START:
6340 return expand_builtin_va_start (exp);
6341 case BUILT_IN_VA_END:
6342 return expand_builtin_va_end (exp);
6343 case BUILT_IN_VA_COPY:
6344 return expand_builtin_va_copy (exp);
6345 case BUILT_IN_EXPECT:
6346 return expand_builtin_expect (exp, target);
6347 case BUILT_IN_ASSUME_ALIGNED:
6348 return expand_builtin_assume_aligned (exp, target);
6349 case BUILT_IN_PREFETCH:
6350 expand_builtin_prefetch (exp);
6351 return const0_rtx;
6353 case BUILT_IN_INIT_TRAMPOLINE:
6354 return expand_builtin_init_trampoline (exp, true);
6355 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6356 return expand_builtin_init_trampoline (exp, false);
6357 case BUILT_IN_ADJUST_TRAMPOLINE:
6358 return expand_builtin_adjust_trampoline (exp);
6360 case BUILT_IN_FORK:
6361 case BUILT_IN_EXECL:
6362 case BUILT_IN_EXECV:
6363 case BUILT_IN_EXECLP:
6364 case BUILT_IN_EXECLE:
6365 case BUILT_IN_EXECVP:
6366 case BUILT_IN_EXECVE:
6367 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6368 if (target)
6369 return target;
6370 break;
6372 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6373 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6374 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6375 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6376 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6377 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6378 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6379 if (target)
6380 return target;
6381 break;
6383 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6384 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6385 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6386 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6387 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6388 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6389 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6390 if (target)
6391 return target;
6392 break;
6394 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6395 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6396 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6397 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6398 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6399 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6400 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6401 if (target)
6402 return target;
6403 break;
6405 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6406 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6407 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6408 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6409 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6410 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6411 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6412 if (target)
6413 return target;
6414 break;
6416 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6417 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6418 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6419 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6420 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6421 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6422 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6423 if (target)
6424 return target;
6425 break;
6427 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6428 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6429 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6430 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6431 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6432 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6433 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6434 if (target)
6435 return target;
6436 break;
6438 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6439 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6440 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6441 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6442 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6443 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6444 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6445 if (target)
6446 return target;
6447 break;
6449 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6450 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6451 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6452 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6453 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6454 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6455 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6456 if (target)
6457 return target;
6458 break;
6460 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6461 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6462 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6463 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6464 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6465 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6466 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6467 if (target)
6468 return target;
6469 break;
6471 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6472 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6473 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6474 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6475 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6476 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6477 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6478 if (target)
6479 return target;
6480 break;
6482 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6483 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6484 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6485 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6486 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6487 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6488 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6489 if (target)
6490 return target;
6491 break;
6493 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6494 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6495 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6496 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6497 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6498 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6499 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6500 if (target)
6501 return target;
6502 break;
6504 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6505 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6506 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6507 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6508 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6509 if (mode == VOIDmode)
6510 mode = TYPE_MODE (boolean_type_node);
6511 if (!target || !register_operand (target, mode))
6512 target = gen_reg_rtx (mode);
6514 mode = get_builtin_sync_mode
6515 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6516 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6517 if (target)
6518 return target;
6519 break;
6521 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6522 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6523 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6524 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6525 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6526 mode = get_builtin_sync_mode
6527 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6528 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6529 if (target)
6530 return target;
6531 break;
6533 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6534 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6535 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6536 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6537 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6538 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6539 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6540 if (target)
6541 return target;
6542 break;
6544 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6545 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6546 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6547 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6548 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6549 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6550 expand_builtin_sync_lock_release (mode, exp);
6551 return const0_rtx;
6553 case BUILT_IN_SYNC_SYNCHRONIZE:
6554 expand_builtin_sync_synchronize ();
6555 return const0_rtx;
6557 case BUILT_IN_ATOMIC_EXCHANGE_1:
6558 case BUILT_IN_ATOMIC_EXCHANGE_2:
6559 case BUILT_IN_ATOMIC_EXCHANGE_4:
6560 case BUILT_IN_ATOMIC_EXCHANGE_8:
6561 case BUILT_IN_ATOMIC_EXCHANGE_16:
6562 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6563 target = expand_builtin_atomic_exchange (mode, exp, target);
6564 if (target)
6565 return target;
6566 break;
6568 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6569 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6570 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6571 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6572 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6574 unsigned int nargs, z;
6575 vec<tree, va_gc> *vec;
6577 mode =
6578 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6579 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6580 if (target)
6581 return target;
6583 /* If this is turned into an external library call, the weak parameter
6584 must be dropped to match the expected parameter list. */
6585 nargs = call_expr_nargs (exp);
6586 vec_alloc (vec, nargs - 1);
6587 for (z = 0; z < 3; z++)
6588 vec->quick_push (CALL_EXPR_ARG (exp, z));
6589 /* Skip the boolean weak parameter. */
6590 for (z = 4; z < 6; z++)
6591 vec->quick_push (CALL_EXPR_ARG (exp, z));
6592 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6593 break;
6596 case BUILT_IN_ATOMIC_LOAD_1:
6597 case BUILT_IN_ATOMIC_LOAD_2:
6598 case BUILT_IN_ATOMIC_LOAD_4:
6599 case BUILT_IN_ATOMIC_LOAD_8:
6600 case BUILT_IN_ATOMIC_LOAD_16:
6601 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6602 target = expand_builtin_atomic_load (mode, exp, target);
6603 if (target)
6604 return target;
6605 break;
6607 case BUILT_IN_ATOMIC_STORE_1:
6608 case BUILT_IN_ATOMIC_STORE_2:
6609 case BUILT_IN_ATOMIC_STORE_4:
6610 case BUILT_IN_ATOMIC_STORE_8:
6611 case BUILT_IN_ATOMIC_STORE_16:
6612 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6613 target = expand_builtin_atomic_store (mode, exp);
6614 if (target)
6615 return const0_rtx;
6616 break;
6618 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6619 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6620 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6621 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6622 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6624 enum built_in_function lib;
6625 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6626 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6627 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6628 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6629 ignore, lib);
6630 if (target)
6631 return target;
6632 break;
6634 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6635 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6636 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6637 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6638 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6640 enum built_in_function lib;
6641 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6642 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6643 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6644 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6645 ignore, lib);
6646 if (target)
6647 return target;
6648 break;
6650 case BUILT_IN_ATOMIC_AND_FETCH_1:
6651 case BUILT_IN_ATOMIC_AND_FETCH_2:
6652 case BUILT_IN_ATOMIC_AND_FETCH_4:
6653 case BUILT_IN_ATOMIC_AND_FETCH_8:
6654 case BUILT_IN_ATOMIC_AND_FETCH_16:
6656 enum built_in_function lib;
6657 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6658 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6659 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6660 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6661 ignore, lib);
6662 if (target)
6663 return target;
6664 break;
6666 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6667 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6668 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6669 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6670 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6672 enum built_in_function lib;
6673 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6674 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6675 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6676 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6677 ignore, lib);
6678 if (target)
6679 return target;
6680 break;
6682 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6683 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6684 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6685 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6686 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6688 enum built_in_function lib;
6689 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6690 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6691 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6692 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6693 ignore, lib);
6694 if (target)
6695 return target;
6696 break;
6698 case BUILT_IN_ATOMIC_OR_FETCH_1:
6699 case BUILT_IN_ATOMIC_OR_FETCH_2:
6700 case BUILT_IN_ATOMIC_OR_FETCH_4:
6701 case BUILT_IN_ATOMIC_OR_FETCH_8:
6702 case BUILT_IN_ATOMIC_OR_FETCH_16:
6704 enum built_in_function lib;
6705 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6706 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6707 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6708 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6709 ignore, lib);
6710 if (target)
6711 return target;
6712 break;
6714 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6715 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6716 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6717 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6718 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6719 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6720 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6721 ignore, BUILT_IN_NONE);
6722 if (target)
6723 return target;
6724 break;
6726 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6727 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6728 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6729 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6730 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6731 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6732 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6733 ignore, BUILT_IN_NONE);
6734 if (target)
6735 return target;
6736 break;
6738 case BUILT_IN_ATOMIC_FETCH_AND_1:
6739 case BUILT_IN_ATOMIC_FETCH_AND_2:
6740 case BUILT_IN_ATOMIC_FETCH_AND_4:
6741 case BUILT_IN_ATOMIC_FETCH_AND_8:
6742 case BUILT_IN_ATOMIC_FETCH_AND_16:
6743 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6744 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6745 ignore, BUILT_IN_NONE);
6746 if (target)
6747 return target;
6748 break;
6750 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6751 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6752 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6753 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6754 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6755 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6756 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6757 ignore, BUILT_IN_NONE);
6758 if (target)
6759 return target;
6760 break;
6762 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6763 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6764 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6765 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6766 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6767 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6768 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6769 ignore, BUILT_IN_NONE);
6770 if (target)
6771 return target;
6772 break;
6774 case BUILT_IN_ATOMIC_FETCH_OR_1:
6775 case BUILT_IN_ATOMIC_FETCH_OR_2:
6776 case BUILT_IN_ATOMIC_FETCH_OR_4:
6777 case BUILT_IN_ATOMIC_FETCH_OR_8:
6778 case BUILT_IN_ATOMIC_FETCH_OR_16:
6779 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6780 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6781 ignore, BUILT_IN_NONE);
6782 if (target)
6783 return target;
6784 break;
6786 case BUILT_IN_ATOMIC_TEST_AND_SET:
6787 return expand_builtin_atomic_test_and_set (exp, target);
6789 case BUILT_IN_ATOMIC_CLEAR:
6790 return expand_builtin_atomic_clear (exp);
6792 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6793 return expand_builtin_atomic_always_lock_free (exp);
6795 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6796 target = expand_builtin_atomic_is_lock_free (exp);
6797 if (target)
6798 return target;
6799 break;
6801 case BUILT_IN_ATOMIC_THREAD_FENCE:
6802 expand_builtin_atomic_thread_fence (exp);
6803 return const0_rtx;
6805 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6806 expand_builtin_atomic_signal_fence (exp);
6807 return const0_rtx;
6809 case BUILT_IN_OBJECT_SIZE:
6810 return expand_builtin_object_size (exp);
6812 case BUILT_IN_MEMCPY_CHK:
6813 case BUILT_IN_MEMPCPY_CHK:
6814 case BUILT_IN_MEMMOVE_CHK:
6815 case BUILT_IN_MEMSET_CHK:
6816 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6817 if (target)
6818 return target;
6819 break;
6821 case BUILT_IN_STRCPY_CHK:
6822 case BUILT_IN_STPCPY_CHK:
6823 case BUILT_IN_STRNCPY_CHK:
6824 case BUILT_IN_STPNCPY_CHK:
6825 case BUILT_IN_STRCAT_CHK:
6826 case BUILT_IN_STRNCAT_CHK:
6827 case BUILT_IN_SNPRINTF_CHK:
6828 case BUILT_IN_VSNPRINTF_CHK:
6829 maybe_emit_chk_warning (exp, fcode);
6830 break;
6832 case BUILT_IN_SPRINTF_CHK:
6833 case BUILT_IN_VSPRINTF_CHK:
6834 maybe_emit_sprintf_chk_warning (exp, fcode);
6835 break;
6837 case BUILT_IN_FREE:
6838 if (warn_free_nonheap_object)
6839 maybe_emit_free_warning (exp);
6840 break;
6842 case BUILT_IN_THREAD_POINTER:
6843 return expand_builtin_thread_pointer (exp, target);
6845 case BUILT_IN_SET_THREAD_POINTER:
6846 expand_builtin_set_thread_pointer (exp);
6847 return const0_rtx;
6849 case BUILT_IN_CILK_DETACH:
6850 expand_builtin_cilk_detach (exp);
6851 return const0_rtx;
6853 case BUILT_IN_CILK_POP_FRAME:
6854 expand_builtin_cilk_pop_frame (exp);
6855 return const0_rtx;
6857 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6858 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6859 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6860 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6861 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6862 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6863 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6864 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6865 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6866 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6867 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6868 /* We allow user CHKP builtins if Pointer Bounds
6869 Checker is off. */
6870 if (!chkp_function_instrumented_p (current_function_decl))
6872 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6873 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6874 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6875 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6876 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6877 return expand_normal (CALL_EXPR_ARG (exp, 0));
6878 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6879 return expand_normal (size_zero_node);
6880 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6881 return expand_normal (size_int (-1));
6882 else
6883 return const0_rtx;
6885 /* FALLTHROUGH */
6887 case BUILT_IN_CHKP_BNDMK:
6888 case BUILT_IN_CHKP_BNDSTX:
6889 case BUILT_IN_CHKP_BNDCL:
6890 case BUILT_IN_CHKP_BNDCU:
6891 case BUILT_IN_CHKP_BNDLDX:
6892 case BUILT_IN_CHKP_BNDRET:
6893 case BUILT_IN_CHKP_INTERSECT:
6894 case BUILT_IN_CHKP_NARROW:
6895 case BUILT_IN_CHKP_EXTRACT_LOWER:
6896 case BUILT_IN_CHKP_EXTRACT_UPPER:
6897 /* Software implementation of Pointer Bounds Checker is NYI.
6898 Target support is required. */
6899 error ("Your target platform does not support -fcheck-pointer-bounds");
6900 break;
6902 default: /* just do library call, if unknown builtin */
6903 break;
6906 /* The switch statement above can drop through to cause the function
6907 to be called normally. */
6908 return expand_call (exp, target, ignore);
6911 /* Determine whether a tree node represents a call to a built-in
6912 function. If the tree T is a call to a built-in function with
6913 the right number of arguments of the appropriate types, return
6914 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6915 Otherwise the return value is END_BUILTINS. */
6917 enum built_in_function
6918 builtin_mathfn_code (const_tree t)
6920 const_tree fndecl, arg, parmlist;
6921 const_tree argtype, parmtype;
6922 const_call_expr_arg_iterator iter;
6924 if (TREE_CODE (t) != CALL_EXPR
6925 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6926 return END_BUILTINS;
6928 fndecl = get_callee_fndecl (t);
6929 if (fndecl == NULL_TREE
6930 || TREE_CODE (fndecl) != FUNCTION_DECL
6931 || ! DECL_BUILT_IN (fndecl)
6932 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6933 return END_BUILTINS;
6935 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6936 init_const_call_expr_arg_iterator (t, &iter);
6937 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6939 /* If a function doesn't take a variable number of arguments,
6940 the last element in the list will have type `void'. */
6941 parmtype = TREE_VALUE (parmlist);
6942 if (VOID_TYPE_P (parmtype))
6944 if (more_const_call_expr_args_p (&iter))
6945 return END_BUILTINS;
6946 return DECL_FUNCTION_CODE (fndecl);
6949 if (! more_const_call_expr_args_p (&iter))
6950 return END_BUILTINS;
6952 arg = next_const_call_expr_arg (&iter);
6953 argtype = TREE_TYPE (arg);
6955 if (SCALAR_FLOAT_TYPE_P (parmtype))
6957 if (! SCALAR_FLOAT_TYPE_P (argtype))
6958 return END_BUILTINS;
6960 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6962 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6963 return END_BUILTINS;
6965 else if (POINTER_TYPE_P (parmtype))
6967 if (! POINTER_TYPE_P (argtype))
6968 return END_BUILTINS;
6970 else if (INTEGRAL_TYPE_P (parmtype))
6972 if (! INTEGRAL_TYPE_P (argtype))
6973 return END_BUILTINS;
6975 else
6976 return END_BUILTINS;
6979 /* Variable-length argument list. */
6980 return DECL_FUNCTION_CODE (fndecl);
6983 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6984 evaluate to a constant. */
6986 static tree
6987 fold_builtin_constant_p (tree arg)
6989 /* We return 1 for a numeric type that's known to be a constant
6990 value at compile-time or for an aggregate type that's a
6991 literal constant. */
6992 STRIP_NOPS (arg);
6994 /* If we know this is a constant, return the constant one. */
6995 if (CONSTANT_CLASS_P (arg)
6996 || (TREE_CODE (arg) == CONSTRUCTOR
6997 && TREE_CONSTANT (arg)))
6998 return integer_one_node;
6999 if (TREE_CODE (arg) == ADDR_EXPR)
7001 tree op = TREE_OPERAND (arg, 0);
7002 if (TREE_CODE (op) == STRING_CST
7003 || (TREE_CODE (op) == ARRAY_REF
7004 && integer_zerop (TREE_OPERAND (op, 1))
7005 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7006 return integer_one_node;
7009 /* If this expression has side effects, show we don't know it to be a
7010 constant. Likewise if it's a pointer or aggregate type since in
7011 those cases we only want literals, since those are only optimized
7012 when generating RTL, not later.
7013 And finally, if we are compiling an initializer, not code, we
7014 need to return a definite result now; there's not going to be any
7015 more optimization done. */
7016 if (TREE_SIDE_EFFECTS (arg)
7017 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7018 || POINTER_TYPE_P (TREE_TYPE (arg))
7019 || cfun == 0
7020 || folding_initializer
7021 || force_folding_builtin_constant_p)
7022 return integer_zero_node;
7024 return NULL_TREE;
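/* A user-level sketch; the folding above is what lets the classic
   dispatch idiom cost nothing when X is known at compile time
   (`runtime_double' is a placeholder):

     #define DOUBLE(x) \
       (__builtin_constant_p (x) ? (x) + (x) : runtime_double (x))  */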
7027 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7028 return it as a truthvalue. */
7030 static tree
7031 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7032 tree predictor)
7034 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7036 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7037 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7038 ret_type = TREE_TYPE (TREE_TYPE (fn));
7039 pred_type = TREE_VALUE (arg_types);
7040 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7042 pred = fold_convert_loc (loc, pred_type, pred);
7043 expected = fold_convert_loc (loc, expected_type, expected);
7044 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7045 predictor);
7047 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7048 build_int_cst (ret_type, 0));
7051 /* Fold a call to __builtin_expect with arguments ARG0, ARG1 and ARG2.
7052 Return NULL_TREE if no simplification is possible. */
7054 tree
7055 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7057 tree inner, fndecl, inner_arg0;
7058 enum tree_code code;
7060 /* Distribute the expected value over short-circuiting operators.
7061 See through the cast from truthvalue_type_node to long. */
7062 inner_arg0 = arg0;
7063 while (CONVERT_EXPR_P (inner_arg0)
7064 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7065 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7066 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7068 /* If this is a builtin_expect within a builtin_expect keep the
7069 inner one. See through a comparison against a constant. It
7070 might have been added to create a truthvalue. */
7071 inner = inner_arg0;
7073 if (COMPARISON_CLASS_P (inner)
7074 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7075 inner = TREE_OPERAND (inner, 0);
7077 if (TREE_CODE (inner) == CALL_EXPR
7078 && (fndecl = get_callee_fndecl (inner))
7079 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7080 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7081 return arg0;
7083 inner = inner_arg0;
7084 code = TREE_CODE (inner);
7085 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7087 tree op0 = TREE_OPERAND (inner, 0);
7088 tree op1 = TREE_OPERAND (inner, 1);
7090 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7091 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7092 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7094 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7097 /* If the argument isn't invariant then there's nothing else we can do. */
7098 if (!TREE_CONSTANT (inner_arg0))
7099 return NULL_TREE;
7101 /* If we expect that a comparison against the argument will fold to
7102 a constant return the constant. In practice, this means a true
7103 constant or the address of a non-weak symbol. */
7104 inner = inner_arg0;
7105 STRIP_NOPS (inner);
7106 if (TREE_CODE (inner) == ADDR_EXPR)
7110 inner = TREE_OPERAND (inner, 0);
7112 while (TREE_CODE (inner) == COMPONENT_REF
7113 || TREE_CODE (inner) == ARRAY_REF);
7114 if ((TREE_CODE (inner) == VAR_DECL
7115 || TREE_CODE (inner) == FUNCTION_DECL)
7116 && DECL_WEAK (inner))
7117 return NULL_TREE;
7120 /* Otherwise, ARG0 already has the proper type for the return value. */
7121 return arg0;
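
/* Illustrative sketch (editorial addition, not from the original
   sources): at the source level, the TRUTH_ANDIF_EXPR/TRUTH_ORIF_EXPR
   case above behaves as if

     if (__builtin_expect (p != 0 && q != 0, 1))

   had been written as

     if (__builtin_expect (p != 0, 1) && __builtin_expect (q != 0, 1))

   so each short-circuit arm carries its own branch-probability hint.  */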
7124 /* Fold a call to __builtin_classify_type with argument ARG. */
7126 static tree
7127 fold_builtin_classify_type (tree arg)
7129 if (arg == 0)
7130 return build_int_cst (integer_type_node, no_type_class);
7132 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7135 /* Fold a call to __builtin_strlen with argument ARG. */
7137 static tree
7138 fold_builtin_strlen (location_t loc, tree type, tree arg)
7140 if (!validate_arg (arg, POINTER_TYPE))
7141 return NULL_TREE;
7142 else
7144 tree len = c_strlen (arg, 0);
7146 if (len)
7147 return fold_convert_loc (loc, type, len);
7149 return NULL_TREE;
7153 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7155 static tree
7156 fold_builtin_inf (location_t loc, tree type, int warn)
7158 REAL_VALUE_TYPE real;
7160 /* __builtin_inff is intended to be usable to define INFINITY on all
7161 targets. If an infinity is not available, INFINITY expands "to a
7162 positive constant of type float that overflows at translation
7163 time", footnote "In this case, using INFINITY will violate the
7164 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7165 Thus we pedwarn to ensure this constraint violation is
7166 diagnosed. */
7167 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7168 pedwarn (loc, 0, "target format does not support infinity");
7170 real_inf (&real);
7171 return build_real (type, real);
7174 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7176 static tree
7177 fold_builtin_nan (tree arg, tree type, int quiet)
7179 REAL_VALUE_TYPE real;
7180 const char *str;
7182 if (!validate_arg (arg, POINTER_TYPE))
7183 return NULL_TREE;
7184 str = c_getstr (arg);
7185 if (!str)
7186 return NULL_TREE;
7188 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7189 return NULL_TREE;
7191 return build_real (type, real);
7194 /* Return true if the floating point expression T has an integer value.
7195 We also allow +Inf, -Inf and NaN to be considered integer values. */
7197 static bool
7198 integer_valued_real_p (tree t)
7200 switch (TREE_CODE (t))
7202 case FLOAT_EXPR:
7203 return true;
7205 case ABS_EXPR:
7206 case SAVE_EXPR:
7207 return integer_valued_real_p (TREE_OPERAND (t, 0));
7209 case COMPOUND_EXPR:
7210 case MODIFY_EXPR:
7211 case BIND_EXPR:
7212 return integer_valued_real_p (TREE_OPERAND (t, 1));
7214 case PLUS_EXPR:
7215 case MINUS_EXPR:
7216 case MULT_EXPR:
7217 case MIN_EXPR:
7218 case MAX_EXPR:
7219 return integer_valued_real_p (TREE_OPERAND (t, 0))
7220 && integer_valued_real_p (TREE_OPERAND (t, 1));
7222 case COND_EXPR:
7223 return integer_valued_real_p (TREE_OPERAND (t, 1))
7224 && integer_valued_real_p (TREE_OPERAND (t, 2));
7226 case REAL_CST:
7227 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7229 CASE_CONVERT:
7231 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7232 if (TREE_CODE (type) == INTEGER_TYPE)
7233 return true;
7234 if (TREE_CODE (type) == REAL_TYPE)
7235 return integer_valued_real_p (TREE_OPERAND (t, 0));
7236 break;
7239 case CALL_EXPR:
7240 switch (builtin_mathfn_code (t))
7242 CASE_FLT_FN (BUILT_IN_CEIL):
7243 CASE_FLT_FN (BUILT_IN_FLOOR):
7244 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7245 CASE_FLT_FN (BUILT_IN_RINT):
7246 CASE_FLT_FN (BUILT_IN_ROUND):
7247 CASE_FLT_FN (BUILT_IN_TRUNC):
7248 return true;
7250 CASE_FLT_FN (BUILT_IN_FMIN):
7251 CASE_FLT_FN (BUILT_IN_FMAX):
7252 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7253 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7255 default:
7256 break;
7258 break;
7260 default:
7261 break;
7263 return false;
7266 /* FNDECL is assumed to be a builtin where truncation can be propagated
7267 across (for instance floor((double)f) == (double)floorf (f)).
7268 Do the transformation for a call with argument ARG. */
7270 static tree
7271 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7273 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7275 if (!validate_arg (arg, REAL_TYPE))
7276 return NULL_TREE;
7278 /* Integer rounding functions are idempotent. */
7279 if (fcode == builtin_mathfn_code (arg))
7280 return arg;
7282 /* If argument is already integer valued, and we don't need to worry
7283 about setting errno, there's no need to perform rounding. */
7284 if (! flag_errno_math && integer_valued_real_p (arg))
7285 return arg;
7287 if (optimize)
7289 tree arg0 = strip_float_extensions (arg);
7290 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7291 tree newtype = TREE_TYPE (arg0);
7292 tree decl;
7294 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7295 && (decl = mathfn_built_in (newtype, fcode)))
7296 return fold_convert_loc (loc, ftype,
7297 build_call_expr_loc (loc, decl, 1,
7298 fold_convert_loc (loc,
7299 newtype,
7300 arg0)));
7302 return NULL_TREE;
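
/* Illustrative sketch (editorial; assume f has type float): under the
   fold above, and when optimizing,

     double d = floor ((double) f);

   becomes the equivalent of

     double d = (double) floorf (f);

   since float is strictly narrower than double, saving the widening
   and the wider-precision floor.  */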
7305 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7306 the argument, for instance lround((double)f) -> lroundf (f).
7307 Do the transformation for a call with argument ARG. */
7309 static tree
7310 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7312 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7314 if (!validate_arg (arg, REAL_TYPE))
7315 return NULL_TREE;
7317 /* If argument is already integer valued, and we don't need to worry
7318 about setting errno, there's no need to perform rounding. */
7319 if (! flag_errno_math && integer_valued_real_p (arg))
7320 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7321 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7323 if (optimize)
7325 tree ftype = TREE_TYPE (arg);
7326 tree arg0 = strip_float_extensions (arg);
7327 tree newtype = TREE_TYPE (arg0);
7328 tree decl;
7330 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7331 && (decl = mathfn_built_in (newtype, fcode)))
7332 return build_call_expr_loc (loc, decl, 1,
7333 fold_convert_loc (loc, newtype, arg0));
7336 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7337 sizeof (int) == sizeof (long). */
7338 if (TYPE_PRECISION (integer_type_node)
7339 == TYPE_PRECISION (long_integer_type_node))
7341 tree newfn = NULL_TREE;
7342 switch (fcode)
7344 CASE_FLT_FN (BUILT_IN_ICEIL):
7345 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7346 break;
7348 CASE_FLT_FN (BUILT_IN_IFLOOR):
7349 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7350 break;
7352 CASE_FLT_FN (BUILT_IN_IROUND):
7353 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7354 break;
7356 CASE_FLT_FN (BUILT_IN_IRINT):
7357 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7358 break;
7360 default:
7361 break;
7364 if (newfn)
7366 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7367 return fold_convert_loc (loc,
7368 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7372 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7373 sizeof (long long) == sizeof (long). */
7374 if (TYPE_PRECISION (long_long_integer_type_node)
7375 == TYPE_PRECISION (long_integer_type_node))
7377 tree newfn = NULL_TREE;
7378 switch (fcode)
7380 CASE_FLT_FN (BUILT_IN_LLCEIL):
7381 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7382 break;
7384 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7385 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7386 break;
7388 CASE_FLT_FN (BUILT_IN_LLROUND):
7389 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7390 break;
7392 CASE_FLT_FN (BUILT_IN_LLRINT):
7393 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7394 break;
7396 default:
7397 break;
7400 if (newfn)
7402 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7403 return fold_convert_loc (loc,
7404 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7408 return NULL_TREE;
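
/* Illustrative sketch (editorial): on an LP64 target, where long and
   long long have the same precision, the second switch above rewrites

     long long v = llround (x);

   into the equivalent of

     long long v = (long long) lround (x);

   and the first switch maps iround/iceil/ifloor/irint to the l* forms
   on ILP32 targets in the same way.  */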
7411 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7412 return type. Return NULL_TREE if no simplification can be made. */
7414 static tree
7415 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7417 tree res;
7419 if (!validate_arg (arg, COMPLEX_TYPE)
7420 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7421 return NULL_TREE;
7423 /* Calculate the result when the argument is a constant. */
7424 if (TREE_CODE (arg) == COMPLEX_CST
7425 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7426 type, mpfr_hypot)))
7427 return res;
7429 if (TREE_CODE (arg) == COMPLEX_EXPR)
7431 tree real = TREE_OPERAND (arg, 0);
7432 tree imag = TREE_OPERAND (arg, 1);
7434 /* If either part is zero, cabs is fabs of the other. */
7435 if (real_zerop (real))
7436 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7437 if (real_zerop (imag))
7438 return fold_build1_loc (loc, ABS_EXPR, type, real);
7440 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7441 if (flag_unsafe_math_optimizations
7442 && operand_equal_p (real, imag, OEP_PURE_SAME))
7444 const REAL_VALUE_TYPE sqrt2_trunc
7445 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7446 STRIP_NOPS (real);
7447 return fold_build2_loc (loc, MULT_EXPR, type,
7448 fold_build1_loc (loc, ABS_EXPR, type, real),
7449 build_real (type, sqrt2_trunc));
7453 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7454 if (TREE_CODE (arg) == NEGATE_EXPR
7455 || TREE_CODE (arg) == CONJ_EXPR)
7456 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7458 /* Don't expand cabs via sqrt when optimizing for size. */
7459 if (flag_unsafe_math_optimizations
7460 && optimize && optimize_function_for_speed_p (cfun))
7462 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7464 if (sqrtfn != NULL_TREE)
7466 tree rpart, ipart, result;
7468 arg = builtin_save_expr (arg);
7470 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7471 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7473 rpart = builtin_save_expr (rpart);
7474 ipart = builtin_save_expr (ipart);
7476 result = fold_build2_loc (loc, PLUS_EXPR, type,
7477 fold_build2_loc (loc, MULT_EXPR, type,
7478 rpart, rpart),
7479 fold_build2_loc (loc, MULT_EXPR, type,
7480 ipart, ipart));
7482 return build_call_expr_loc (loc, sqrtfn, 1, result);
7486 return NULL_TREE;
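
/* Illustrative sketch (editorial): with -funsafe-math-optimizations,
   when optimizing for speed, the expansion above computes

     cabs (z)

   as the equivalent of

     sqrt (creal (z) * creal (z) + cimag (z) * cimag (z))

   via REALPART_EXPR/IMAGPART_EXPR, avoiding the libm call.  */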
7489 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7490 complex tree type of the result. If NEG is true, the imaginary
7491 zero is negative. */
7493 static tree
7494 build_complex_cproj (tree type, bool neg)
7496 REAL_VALUE_TYPE rinf, rzero = dconst0;
7498 real_inf (&rinf);
7499 rzero.sign = neg;
7500 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7501 build_real (TREE_TYPE (type), rzero));
7504 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7505 return type. Return NULL_TREE if no simplification can be made. */
7507 static tree
7508 fold_builtin_cproj (location_t loc, tree arg, tree type)
7510 if (!validate_arg (arg, COMPLEX_TYPE)
7511 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7512 return NULL_TREE;
7514 /* If there are no infinities, return arg. */
7515 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7516 return non_lvalue_loc (loc, arg);
7518 /* Calculate the result when the argument is a constant. */
7519 if (TREE_CODE (arg) == COMPLEX_CST)
7521 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7522 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7524 if (real_isinf (real) || real_isinf (imag))
7525 return build_complex_cproj (type, imag->sign);
7526 else
7527 return arg;
7529 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7531 tree real = TREE_OPERAND (arg, 0);
7532 tree imag = TREE_OPERAND (arg, 1);
7534 STRIP_NOPS (real);
7535 STRIP_NOPS (imag);
7537 /* If the real part is inf and the imag part is known to be
7538 nonnegative, return (inf + 0i). Remember side-effects are
7539 possible in the imag part. */
7540 if (TREE_CODE (real) == REAL_CST
7541 && real_isinf (TREE_REAL_CST_PTR (real))
7542 && tree_expr_nonnegative_p (imag))
7543 return omit_one_operand_loc (loc, type,
7544 build_complex_cproj (type, false),
7545 arg);
7547 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7548 Remember side-effects are possible in the real part. */
7549 if (TREE_CODE (imag) == REAL_CST
7550 && real_isinf (TREE_REAL_CST_PTR (imag)))
7551 return
7552 omit_one_operand_loc (loc, type,
7553 build_complex_cproj (type, TREE_REAL_CST_PTR
7554 (imag)->sign), arg);
7557 return NULL_TREE;
7560 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7561 Return NULL_TREE if no simplification can be made. */
7563 static tree
7564 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7567 enum built_in_function fcode;
7568 tree res;
7570 if (!validate_arg (arg, REAL_TYPE))
7571 return NULL_TREE;
7573 /* Calculate the result when the argument is a constant. */
7574 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7575 return res;
7577 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7578 fcode = builtin_mathfn_code (arg);
7579 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7581 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7582 arg = fold_build2_loc (loc, MULT_EXPR, type,
7583 CALL_EXPR_ARG (arg, 0),
7584 build_real (type, dconsthalf));
7585 return build_call_expr_loc (loc, expfn, 1, arg);
7588 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7589 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7591 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7593 if (powfn)
7595 tree arg0 = CALL_EXPR_ARG (arg, 0);
7596 tree tree_root;
7597 /* The inner root was either sqrt or cbrt. */
7598 /* This was a conditional expression but it triggered a bug
7599 in Sun C 5.5. */
7600 REAL_VALUE_TYPE dconstroot;
7601 if (BUILTIN_SQRT_P (fcode))
7602 dconstroot = dconsthalf;
7603 else
7604 dconstroot = dconst_third ();
7606 /* Adjust for the outer root. */
7607 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7608 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7609 tree_root = build_real (type, dconstroot);
7610 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7614 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7615 if (flag_unsafe_math_optimizations
7616 && (fcode == BUILT_IN_POW
7617 || fcode == BUILT_IN_POWF
7618 || fcode == BUILT_IN_POWL))
7620 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7621 tree arg0 = CALL_EXPR_ARG (arg, 0);
7622 tree arg1 = CALL_EXPR_ARG (arg, 1);
7623 tree narg1;
7624 if (!tree_expr_nonnegative_p (arg0))
7625 arg0 = build1 (ABS_EXPR, type, arg0);
7626 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7627 build_real (type, dconsthalf));
7628 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7631 return NULL_TREE;
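
/* Illustrative summary (editorial) of the unsafe-math folds above:

     sqrt (exp (x))     ->  exp (x * 0.5)
     sqrt (cbrt (x))    ->  pow (x, 1.0 / 6.0)
     sqrt (pow (x, y))  ->  pow (fabs (x), y * 0.5)

   where the fabs guards against raising a negative base to a
   fractional power.  */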
7634 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7635 Return NULL_TREE if no simplification can be made. */
7637 static tree
7638 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7640 const enum built_in_function fcode = builtin_mathfn_code (arg);
7641 tree res;
7643 if (!validate_arg (arg, REAL_TYPE))
7644 return NULL_TREE;
7646 /* Calculate the result when the argument is a constant. */
7647 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7648 return res;
7650 if (flag_unsafe_math_optimizations)
7652 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7653 if (BUILTIN_EXPONENT_P (fcode))
7655 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7656 const REAL_VALUE_TYPE third_trunc =
7657 real_value_truncate (TYPE_MODE (type), dconst_third ());
7658 arg = fold_build2_loc (loc, MULT_EXPR, type,
7659 CALL_EXPR_ARG (arg, 0),
7660 build_real (type, third_trunc));
7661 return build_call_expr_loc (loc, expfn, 1, arg);
7664 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7665 if (BUILTIN_SQRT_P (fcode))
7667 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7669 if (powfn)
7671 tree arg0 = CALL_EXPR_ARG (arg, 0);
7672 tree tree_root;
7673 REAL_VALUE_TYPE dconstroot = dconst_third ();
7675 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7676 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7677 tree_root = build_real (type, dconstroot);
7678 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7682 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7683 if (BUILTIN_CBRT_P (fcode))
7685 tree arg0 = CALL_EXPR_ARG (arg, 0);
7686 if (tree_expr_nonnegative_p (arg0))
7688 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7690 if (powfn)
7692 tree tree_root;
7693 REAL_VALUE_TYPE dconstroot;
7695 real_arithmetic (&dconstroot, MULT_EXPR,
7696 dconst_third_ptr (), dconst_third_ptr ());
7697 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7698 tree_root = build_real (type, dconstroot);
7699 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7704 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7705 if (fcode == BUILT_IN_POW
7706 || fcode == BUILT_IN_POWF
7707 || fcode == BUILT_IN_POWL)
7709 tree arg00 = CALL_EXPR_ARG (arg, 0);
7710 tree arg01 = CALL_EXPR_ARG (arg, 1);
7711 if (tree_expr_nonnegative_p (arg00))
7713 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7714 const REAL_VALUE_TYPE dconstroot
7715 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7716 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7717 build_real (type, dconstroot));
7718 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7722 return NULL_TREE;
7725 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7726 TYPE is the type of the return value. Return NULL_TREE if no
7727 simplification can be made. */
7729 static tree
7730 fold_builtin_cos (location_t loc,
7731 tree arg, tree type, tree fndecl)
7733 tree res, narg;
7735 if (!validate_arg (arg, REAL_TYPE))
7736 return NULL_TREE;
7738 /* Calculate the result when the argument is a constant. */
7739 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7740 return res;
7742 /* Optimize cos(-x) into cos (x). */
7743 if ((narg = fold_strip_sign_ops (arg)))
7744 return build_call_expr_loc (loc, fndecl, 1, narg);
7746 return NULL_TREE;
7749 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7750 Return NULL_TREE if no simplification can be made. */
7752 static tree
7753 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7755 if (validate_arg (arg, REAL_TYPE))
7757 tree res, narg;
7759 /* Calculate the result when the argument is a constant. */
7760 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7761 return res;
7763 /* Optimize cosh(-x) into cosh (x). */
7764 if ((narg = fold_strip_sign_ops (arg)))
7765 return build_call_expr_loc (loc, fndecl, 1, narg);
7768 return NULL_TREE;
7771 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7772 argument ARG. TYPE is the type of the return value. Return
7773 NULL_TREE if no simplification can be made. */
7775 static tree
7776 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7777 bool hyper)
7779 if (validate_arg (arg, COMPLEX_TYPE)
7780 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7782 tree tmp;
7784 /* Calculate the result when the argument is a constant. */
7785 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7786 return tmp;
7788 /* Optimize fn(-x) into fn(x). */
7789 if ((tmp = fold_strip_sign_ops (arg)))
7790 return build_call_expr_loc (loc, fndecl, 1, tmp);
7793 return NULL_TREE;
7796 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7797 Return NULL_TREE if no simplification can be made. */
7799 static tree
7800 fold_builtin_tan (tree arg, tree type)
7802 enum built_in_function fcode;
7803 tree res;
7805 if (!validate_arg (arg, REAL_TYPE))
7806 return NULL_TREE;
7808 /* Calculate the result when the argument is a constant. */
7809 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7810 return res;
7812 /* Optimize tan(atan(x)) = x. */
7813 fcode = builtin_mathfn_code (arg);
7814 if (flag_unsafe_math_optimizations
7815 && (fcode == BUILT_IN_ATAN
7816 || fcode == BUILT_IN_ATANF
7817 || fcode == BUILT_IN_ATANL))
7818 return CALL_EXPR_ARG (arg, 0);
7820 return NULL_TREE;
7823 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7824 NULL_TREE if no simplification can be made. */
7826 static tree
7827 fold_builtin_sincos (location_t loc,
7828 tree arg0, tree arg1, tree arg2)
7830 tree type;
7831 tree res, fn, call;
7833 if (!validate_arg (arg0, REAL_TYPE)
7834 || !validate_arg (arg1, POINTER_TYPE)
7835 || !validate_arg (arg2, POINTER_TYPE))
7836 return NULL_TREE;
7838 type = TREE_TYPE (arg0);
7840 /* Calculate the result when the argument is a constant. */
7841 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7842 return res;
7844 /* Canonicalize sincos to cexpi. */
7845 if (!targetm.libc_has_function (function_c99_math_complex))
7846 return NULL_TREE;
7847 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7848 if (!fn)
7849 return NULL_TREE;
7851 call = build_call_expr_loc (loc, fn, 1, arg0);
7852 call = builtin_save_expr (call);
7854 return build2 (COMPOUND_EXPR, void_type_node,
7855 build2 (MODIFY_EXPR, void_type_node,
7856 build_fold_indirect_ref_loc (loc, arg1),
7857 build1 (IMAGPART_EXPR, type, call)),
7858 build2 (MODIFY_EXPR, void_type_node,
7859 build_fold_indirect_ref_loc (loc, arg2),
7860 build1 (REALPART_EXPR, type, call)));
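
/* Illustrative sketch (editorial; cexpi is GCC's internal builtin):
   when the target libc has the C99 complex functions,

     sincos (x, &s, &c);

   is canonicalized to the equivalent of

     __complex__ double t = __builtin_cexpi (x);
     s = __imag__ t;
     c = __real__ t;

   so later passes see a single call computing cos (x) + I*sin (x).  */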
7863 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7864 NULL_TREE if no simplification can be made. */
7866 static tree
7867 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7869 tree rtype;
7870 tree realp, imagp, ifn;
7871 tree res;
7873 if (!validate_arg (arg0, COMPLEX_TYPE)
7874 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7875 return NULL_TREE;
7877 /* Calculate the result when the argument is a constant. */
7878 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7879 return res;
7881 rtype = TREE_TYPE (TREE_TYPE (arg0));
7883 /* If we can figure out the real part of arg0 and it is a constant
7884 zero, fold to cexpi. */
7885 if (!targetm.libc_has_function (function_c99_math_complex))
7886 return NULL_TREE;
7887 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7888 if (!ifn)
7889 return NULL_TREE;
7891 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7892 && real_zerop (realp))
7894 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7895 return build_call_expr_loc (loc, ifn, 1, narg);
7898 /* If we can easily decompose the real and imaginary parts, split cexp
7899 into exp (r) * cexpi (i). */
7900 if (flag_unsafe_math_optimizations
7901 && realp)
7903 tree rfn, rcall, icall;
7905 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7906 if (!rfn)
7907 return NULL_TREE;
7909 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7910 if (!imagp)
7911 return NULL_TREE;
7913 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7914 icall = builtin_save_expr (icall);
7915 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7916 rcall = builtin_save_expr (rcall);
7917 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7918 fold_build2_loc (loc, MULT_EXPR, rtype,
7919 rcall,
7920 fold_build1_loc (loc, REALPART_EXPR,
7921 rtype, icall)),
7922 fold_build2_loc (loc, MULT_EXPR, rtype,
7923 rcall,
7924 fold_build1_loc (loc, IMAGPART_EXPR,
7925 rtype, icall)));
7928 return NULL_TREE;
7931 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7932 Return NULL_TREE if no simplification can be made. */
7934 static tree
7935 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7937 if (!validate_arg (arg, REAL_TYPE))
7938 return NULL_TREE;
7940 /* Optimize trunc of constant value. */
7941 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7943 REAL_VALUE_TYPE r, x;
7944 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7946 x = TREE_REAL_CST (arg);
7947 real_trunc (&r, TYPE_MODE (type), &x);
7948 return build_real (type, r);
7951 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7954 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7955 Return NULL_TREE if no simplification can be made. */
7957 static tree
7958 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7960 if (!validate_arg (arg, REAL_TYPE))
7961 return NULL_TREE;
7963 /* Optimize floor of constant value. */
7964 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7966 REAL_VALUE_TYPE x;
7968 x = TREE_REAL_CST (arg);
7969 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7971 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7972 REAL_VALUE_TYPE r;
7974 real_floor (&r, TYPE_MODE (type), &x);
7975 return build_real (type, r);
7979 /* Fold floor (x) where x is nonnegative to trunc (x). */
7980 if (tree_expr_nonnegative_p (arg))
7982 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7983 if (truncfn)
7984 return build_call_expr_loc (loc, truncfn, 1, arg);
7987 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7990 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7991 Return NULL_TREE if no simplification can be made. */
7993 static tree
7994 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7996 if (!validate_arg (arg, REAL_TYPE))
7997 return NULL_TREE;
7999 /* Optimize ceil of constant value. */
8000 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8002 REAL_VALUE_TYPE x;
8004 x = TREE_REAL_CST (arg);
8005 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8007 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8008 REAL_VALUE_TYPE r;
8010 real_ceil (&r, TYPE_MODE (type), &x);
8011 return build_real (type, r);
8015 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8018 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8019 Return NULL_TREE if no simplification can be made. */
8021 static tree
8022 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8024 if (!validate_arg (arg, REAL_TYPE))
8025 return NULL_TREE;
8027 /* Optimize round of constant value. */
8028 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8030 REAL_VALUE_TYPE x;
8032 x = TREE_REAL_CST (arg);
8033 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8035 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8036 REAL_VALUE_TYPE r;
8038 real_round (&r, TYPE_MODE (type), &x);
8039 return build_real (type, r);
8043 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8046 /* Fold function call to builtin lround, lroundf or lroundl (or the
8047 corresponding long long versions) and other rounding functions. ARG
8048 is the argument to the call. Return NULL_TREE if no simplification
8049 can be made. */
8051 static tree
8052 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8054 if (!validate_arg (arg, REAL_TYPE))
8055 return NULL_TREE;
8057 /* Optimize lround of constant value. */
8058 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8060 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8062 if (real_isfinite (&x))
8064 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8065 tree ftype = TREE_TYPE (arg);
8066 REAL_VALUE_TYPE r;
8067 bool fail = false;
8069 switch (DECL_FUNCTION_CODE (fndecl))
8071 CASE_FLT_FN (BUILT_IN_IFLOOR):
8072 CASE_FLT_FN (BUILT_IN_LFLOOR):
8073 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8074 real_floor (&r, TYPE_MODE (ftype), &x);
8075 break;
8077 CASE_FLT_FN (BUILT_IN_ICEIL):
8078 CASE_FLT_FN (BUILT_IN_LCEIL):
8079 CASE_FLT_FN (BUILT_IN_LLCEIL):
8080 real_ceil (&r, TYPE_MODE (ftype), &x);
8081 break;
8083 CASE_FLT_FN (BUILT_IN_IROUND):
8084 CASE_FLT_FN (BUILT_IN_LROUND):
8085 CASE_FLT_FN (BUILT_IN_LLROUND):
8086 real_round (&r, TYPE_MODE (ftype), &x);
8087 break;
8089 default:
8090 gcc_unreachable ();
8093 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8094 if (!fail)
8095 return wide_int_to_tree (itype, val);
8099 switch (DECL_FUNCTION_CODE (fndecl))
8101 CASE_FLT_FN (BUILT_IN_LFLOOR):
8102 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8103 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8104 if (tree_expr_nonnegative_p (arg))
8105 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8106 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8107 break;
8108 default:;
8111 return fold_fixed_mathfn (loc, fndecl, arg);
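
/* Illustrative sketch (editorial) of two effects of the function
   above: a constant argument folds outright, e.g.

     lround (2.5)  ->  3L

   and for a provably nonnegative X,

     lfloor (X)    ->  (long) X

   since floor and truncation agree for X >= 0.  */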
8114 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8115 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8116 the argument to the call. Return NULL_TREE if no simplification can
8117 be made. */
8119 static tree
8120 fold_builtin_bitop (tree fndecl, tree arg)
8122 if (!validate_arg (arg, INTEGER_TYPE))
8123 return NULL_TREE;
8125 /* Optimize for constant argument. */
8126 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8128 tree type = TREE_TYPE (arg);
8129 int result;
8131 switch (DECL_FUNCTION_CODE (fndecl))
8133 CASE_INT_FN (BUILT_IN_FFS):
8134 result = wi::ffs (arg);
8135 break;
8137 CASE_INT_FN (BUILT_IN_CLZ):
8138 if (wi::ne_p (arg, 0))
8139 result = wi::clz (arg);
8140 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8141 result = TYPE_PRECISION (type);
8142 break;
8144 CASE_INT_FN (BUILT_IN_CTZ):
8145 if (wi::ne_p (arg, 0))
8146 result = wi::ctz (arg);
8147 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8148 result = TYPE_PRECISION (type);
8149 break;
8151 CASE_INT_FN (BUILT_IN_CLRSB):
8152 result = wi::clrsb (arg);
8153 break;
8155 CASE_INT_FN (BUILT_IN_POPCOUNT):
8156 result = wi::popcount (arg);
8157 break;
8159 CASE_INT_FN (BUILT_IN_PARITY):
8160 result = wi::parity (arg);
8161 break;
8163 default:
8164 gcc_unreachable ();
8167 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8170 return NULL_TREE;
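
/* Illustrative sketch (editorial): for constant operands the bit
   builtins above fold to integer constants, e.g.

     __builtin_popcount (0xF0)  ->  4
     __builtin_ffs (8)          ->  4   (1-based index of the low set bit)
     __builtin_parity (7)       ->  1

   while clz/ctz of zero fold only if CLZ/CTZ_DEFINED_VALUE_AT_ZERO
   says the target defines a result there.  */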
8173 /* Fold function call to builtin_bswap and the short, long and long long
8174 variants. Return NULL_TREE if no simplification can be made. */
8175 static tree
8176 fold_builtin_bswap (tree fndecl, tree arg)
8178 if (! validate_arg (arg, INTEGER_TYPE))
8179 return NULL_TREE;
8181 /* Optimize constant value. */
8182 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8184 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8186 switch (DECL_FUNCTION_CODE (fndecl))
8188 case BUILT_IN_BSWAP16:
8189 case BUILT_IN_BSWAP32:
8190 case BUILT_IN_BSWAP64:
8192 signop sgn = TYPE_SIGN (type);
8193 tree result =
8194 wide_int_to_tree (type,
8195 wide_int::from (arg, TYPE_PRECISION (type),
8196 sgn).bswap ());
8197 return result;
8199 default:
8200 gcc_unreachable ();
8204 return NULL_TREE;
8207 /* A subroutine of fold_builtin to fold the various logarithmic
8208 functions. Return NULL_TREE if no simplification can be made.
8209 FUNC is the corresponding MPFR logarithm function. */
8211 static tree
8212 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8213 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8215 if (validate_arg (arg, REAL_TYPE))
8217 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8218 tree res;
8219 const enum built_in_function fcode = builtin_mathfn_code (arg);
8221 /* Calculate the result when the argument is a constant. */
8222 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8223 return res;
8225 /* Special case, optimize logN(expN(x)) = x. */
8226 if (flag_unsafe_math_optimizations
8227 && ((func == mpfr_log
8228 && (fcode == BUILT_IN_EXP
8229 || fcode == BUILT_IN_EXPF
8230 || fcode == BUILT_IN_EXPL))
8231 || (func == mpfr_log2
8232 && (fcode == BUILT_IN_EXP2
8233 || fcode == BUILT_IN_EXP2F
8234 || fcode == BUILT_IN_EXP2L))
8235 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8236 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8238 /* Optimize logN(func()) for various exponential functions. We
8239 want to determine the value "x" and the power "exponent" in
8240 order to transform logN(x**exponent) into exponent*logN(x). */
8241 if (flag_unsafe_math_optimizations)
8243 tree exponent = 0, x = 0;
8245 switch (fcode)
8247 CASE_FLT_FN (BUILT_IN_EXP):
8248 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8249 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8250 dconst_e ()));
8251 exponent = CALL_EXPR_ARG (arg, 0);
8252 break;
8253 CASE_FLT_FN (BUILT_IN_EXP2):
8254 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8255 x = build_real (type, dconst2);
8256 exponent = CALL_EXPR_ARG (arg, 0);
8257 break;
8258 CASE_FLT_FN (BUILT_IN_EXP10):
8259 CASE_FLT_FN (BUILT_IN_POW10):
8260 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8262 REAL_VALUE_TYPE dconst10;
8263 real_from_integer (&dconst10, VOIDmode, 10, SIGNED);
8264 x = build_real (type, dconst10);
8266 exponent = CALL_EXPR_ARG (arg, 0);
8267 break;
8268 CASE_FLT_FN (BUILT_IN_SQRT):
8269 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8270 x = CALL_EXPR_ARG (arg, 0);
8271 exponent = build_real (type, dconsthalf);
8272 break;
8273 CASE_FLT_FN (BUILT_IN_CBRT):
8274 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8275 x = CALL_EXPR_ARG (arg, 0);
8276 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8277 dconst_third ()));
8278 break;
8279 CASE_FLT_FN (BUILT_IN_POW):
8280 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8281 x = CALL_EXPR_ARG (arg, 0);
8282 exponent = CALL_EXPR_ARG (arg, 1);
8283 break;
8284 default:
8285 break;
8288 /* Now perform the optimization. */
8289 if (x && exponent)
8291 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8292 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8297 return NULL_TREE;
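
/* Illustrative sketch (editorial): the exponent extraction above
   yields, under -funsafe-math-optimizations,

     log (exp (x))      ->  x
     log (sqrt (x))     ->  0.5 * log (x)
     log2 (pow (x, y))  ->  y * log2 (x)

   all instances of logN(x**exponent) == exponent * logN(x).  */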
8300 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8301 NULL_TREE if no simplification can be made. */
8303 static tree
8304 fold_builtin_hypot (location_t loc, tree fndecl,
8305 tree arg0, tree arg1, tree type)
8307 tree res, narg0, narg1;
8309 if (!validate_arg (arg0, REAL_TYPE)
8310 || !validate_arg (arg1, REAL_TYPE))
8311 return NULL_TREE;
8313 /* Calculate the result when the argument is a constant. */
8314 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8315 return res;
8317 /* If either argument to hypot has a negate or abs, strip that off.
8318 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8319 narg0 = fold_strip_sign_ops (arg0);
8320 narg1 = fold_strip_sign_ops (arg1);
8321 if (narg0 || narg1)
8323 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8324 narg1 ? narg1 : arg1);
8327 /* If either argument is zero, hypot is fabs of the other. */
8328 if (real_zerop (arg0))
8329 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8330 else if (real_zerop (arg1))
8331 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8333 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8334 if (flag_unsafe_math_optimizations
8335 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8337 const REAL_VALUE_TYPE sqrt2_trunc
8338 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8339 return fold_build2_loc (loc, MULT_EXPR, type,
8340 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8341 build_real (type, sqrt2_trunc));
8344 return NULL_TREE;
8348 /* Fold a builtin function call to pow, powf, or powl. Return
8349 NULL_TREE if no simplification can be made. */
8350 static tree
8351 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8353 tree res;
8355 if (!validate_arg (arg0, REAL_TYPE)
8356 || !validate_arg (arg1, REAL_TYPE))
8357 return NULL_TREE;
8359 /* Calculate the result when the argument is a constant. */
8360 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8361 return res;
8363 /* Optimize pow(1.0,y) = 1.0. */
8364 if (real_onep (arg0))
8365 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8367 if (TREE_CODE (arg1) == REAL_CST
8368 && !TREE_OVERFLOW (arg1))
8370 REAL_VALUE_TYPE cint;
8371 REAL_VALUE_TYPE c;
8372 HOST_WIDE_INT n;
8374 c = TREE_REAL_CST (arg1);
8376 /* Optimize pow(x,0.0) = 1.0. */
8377 if (REAL_VALUES_EQUAL (c, dconst0))
8378 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8379 arg0);
8381 /* Optimize pow(x,1.0) = x. */
8382 if (REAL_VALUES_EQUAL (c, dconst1))
8383 return arg0;
8385 /* Optimize pow(x,-1.0) = 1.0/x. */
8386 if (REAL_VALUES_EQUAL (c, dconstm1))
8387 return fold_build2_loc (loc, RDIV_EXPR, type,
8388 build_real (type, dconst1), arg0);
8390 /* Optimize pow(x,0.5) = sqrt(x). */
8391 if (flag_unsafe_math_optimizations
8392 && REAL_VALUES_EQUAL (c, dconsthalf))
8394 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8396 if (sqrtfn != NULL_TREE)
8397 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8400 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8401 if (flag_unsafe_math_optimizations)
8403 const REAL_VALUE_TYPE dconstroot
8404 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8406 if (REAL_VALUES_EQUAL (c, dconstroot))
8408 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8409 if (cbrtfn != NULL_TREE)
8410 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8414 /* Check for an integer exponent. */
8415 n = real_to_integer (&c);
8416 real_from_integer (&cint, VOIDmode, n, SIGNED);
8417 if (real_identical (&c, &cint))
8419 /* Attempt to evaluate pow at compile-time, unless this should
8420 raise an exception. */
8421 if (TREE_CODE (arg0) == REAL_CST
8422 && !TREE_OVERFLOW (arg0)
8423 && (n > 0
8424 || (!flag_trapping_math && !flag_errno_math)
8425 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8427 REAL_VALUE_TYPE x;
8428 bool inexact;
8430 x = TREE_REAL_CST (arg0);
8431 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8432 if (flag_unsafe_math_optimizations || !inexact)
8433 return build_real (type, x);
8436 /* Strip sign ops from even integer powers. */
8437 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8439 tree narg0 = fold_strip_sign_ops (arg0);
8440 if (narg0)
8441 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8446 if (flag_unsafe_math_optimizations)
8448 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8450 /* Optimize pow(expN(x),y) = expN(x*y). */
8451 if (BUILTIN_EXPONENT_P (fcode))
8453 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8454 tree arg = CALL_EXPR_ARG (arg0, 0);
8455 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8456 return build_call_expr_loc (loc, expfn, 1, arg);
8459 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8460 if (BUILTIN_SQRT_P (fcode))
8462 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8463 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8464 build_real (type, dconsthalf));
8465 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8468 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8469 if (BUILTIN_CBRT_P (fcode))
8471 tree arg = CALL_EXPR_ARG (arg0, 0);
8472 if (tree_expr_nonnegative_p (arg))
8474 const REAL_VALUE_TYPE dconstroot
8475 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8476 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8477 build_real (type, dconstroot));
8478 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8482 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8483 if (fcode == BUILT_IN_POW
8484 || fcode == BUILT_IN_POWF
8485 || fcode == BUILT_IN_POWL)
8487 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8488 if (tree_expr_nonnegative_p (arg00))
8490 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8491 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8492 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8497 return NULL_TREE;
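
/* Illustrative summary (editorial) of the pow folds above:

     pow (x, 0.0)   ->  1.0       (x still evaluated for side effects)
     pow (x, 1.0)   ->  x
     pow (x, -1.0)  ->  1.0 / x
     pow (x, 0.5)   ->  sqrt (x)  (unsafe math only)

   plus composition rules such as pow (pow (x, y), z) -> pow (x, y * z)
   when the inner base is known to be nonnegative.  */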
8500 /* Fold a builtin function call to powi, powif, or powil with arguments
8501 ARG0 and ARG1. Return NULL_TREE if no simplification can be made. */
8502 static tree
8503 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8504 tree arg0, tree arg1, tree type)
8506 if (!validate_arg (arg0, REAL_TYPE)
8507 || !validate_arg (arg1, INTEGER_TYPE))
8508 return NULL_TREE;
8510 /* Optimize pow(1.0,y) = 1.0. */
8511 if (real_onep (arg0))
8512 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8514 if (tree_fits_shwi_p (arg1))
8516 HOST_WIDE_INT c = tree_to_shwi (arg1);
8518 /* Evaluate powi at compile-time. */
8519 if (TREE_CODE (arg0) == REAL_CST
8520 && !TREE_OVERFLOW (arg0))
8522 REAL_VALUE_TYPE x;
8523 x = TREE_REAL_CST (arg0);
8524 real_powi (&x, TYPE_MODE (type), &x, c);
8525 return build_real (type, x);
8528 /* Optimize pow(x,0) = 1.0. */
8529 if (c == 0)
8530 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8531 arg0);
8533 /* Optimize pow(x,1) = x. */
8534 if (c == 1)
8535 return arg0;
8537 /* Optimize pow(x,-1) = 1.0/x. */
8538 if (c == -1)
8539 return fold_build2_loc (loc, RDIV_EXPR, type,
8540 build_real (type, dconst1), arg0);
8543 return NULL_TREE;
8546 /* A subroutine of fold_builtin to fold the various exponent
8547 functions. Return NULL_TREE if no simplification can be made.
8548 FUNC is the corresponding MPFR exponent function. */
8550 static tree
8551 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8552 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8554 if (validate_arg (arg, REAL_TYPE))
8556 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8557 tree res;
8559 /* Calculate the result when the argument is a constant. */
8560 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8561 return res;
8563 /* Optimize expN(logN(x)) = x. */
8564 if (flag_unsafe_math_optimizations)
8566 const enum built_in_function fcode = builtin_mathfn_code (arg);
8568 if ((func == mpfr_exp
8569 && (fcode == BUILT_IN_LOG
8570 || fcode == BUILT_IN_LOGF
8571 || fcode == BUILT_IN_LOGL))
8572 || (func == mpfr_exp2
8573 && (fcode == BUILT_IN_LOG2
8574 || fcode == BUILT_IN_LOG2F
8575 || fcode == BUILT_IN_LOG2L))
8576 || (func == mpfr_exp10
8577 && (fcode == BUILT_IN_LOG10
8578 || fcode == BUILT_IN_LOG10F
8579 || fcode == BUILT_IN_LOG10L)))
8580 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8584 return NULL_TREE;
8587 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8588 Return NULL_TREE if no simplification can be made. */
8590 static tree
8591 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8593 tree fn, len, lenp1, call, type;
8595 if (!validate_arg (dest, POINTER_TYPE)
8596 || !validate_arg (src, POINTER_TYPE))
8597 return NULL_TREE;
8599 len = c_strlen (src, 1);
8600 if (!len
8601 || TREE_CODE (len) != INTEGER_CST)
8602 return NULL_TREE;
8604 if (optimize_function_for_size_p (cfun)
8605 /* If the length is zero, it's small enough. */
8606 && !integer_zerop (len))
8607 return NULL_TREE;
8609 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8610 if (!fn)
8611 return NULL_TREE;
8613 lenp1 = size_binop_loc (loc, PLUS_EXPR,
8614 fold_convert_loc (loc, size_type_node, len),
8615 build_int_cst (size_type_node, 1));
8616 /* We use dest twice in building our expression. Save it from
8617 multiple expansions. */
8618 dest = builtin_save_expr (dest);
8619 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8621 type = TREE_TYPE (TREE_TYPE (fndecl));
8622 dest = fold_build_pointer_plus_loc (loc, dest, len);
8623 dest = fold_convert_loc (loc, type, dest);
8624 dest = omit_one_operand_loc (loc, type, dest, call);
8625 return dest;
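
/* Illustrative sketch (editorial): for a constant-length source the
   fold above turns

     char *e = stpcpy (d, "abc");

   into the equivalent of

     char *e = (memcpy (d, "abc", 4), (char *) d + 3);

   i.e. a copy of length + 1 bytes whose value is DEST + length.  */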
8628 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8629 arguments to the call, and TYPE is its return type.
8630 Return NULL_TREE if no simplification can be made. */
8632 static tree
8633 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8635 if (!validate_arg (arg1, POINTER_TYPE)
8636 || !validate_arg (arg2, INTEGER_TYPE)
8637 || !validate_arg (len, INTEGER_TYPE))
8638 return NULL_TREE;
8639 else
8641 const char *p1;
8643 if (TREE_CODE (arg2) != INTEGER_CST
8644 || !tree_fits_uhwi_p (len))
8645 return NULL_TREE;
8647 p1 = c_getstr (arg1);
8648 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8650 char c;
8651 const char *r;
8652 tree tem;
8654 if (target_char_cast (arg2, &c))
8655 return NULL_TREE;
8657 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8659 if (r == NULL)
8660 return build_int_cst (TREE_TYPE (arg1), 0);
8662 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8663 return fold_convert_loc (loc, type, tem);
8665 return NULL_TREE;
8669 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8670 Return NULL_TREE if no simplification can be made. */
8672 static tree
8673 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8675 const char *p1, *p2;
8677 if (!validate_arg (arg1, POINTER_TYPE)
8678 || !validate_arg (arg2, POINTER_TYPE)
8679 || !validate_arg (len, INTEGER_TYPE))
8680 return NULL_TREE;
8682 /* If the LEN parameter is zero, return zero. */
8683 if (integer_zerop (len))
8684 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8685 arg1, arg2);
8687 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8688 if (operand_equal_p (arg1, arg2, 0))
8689 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8691 p1 = c_getstr (arg1);
8692 p2 = c_getstr (arg2);
8694 /* If all arguments are constant, and the value of len is not greater
8695 than the lengths of arg1 and arg2, evaluate at compile-time. */
8696 if (tree_fits_uhwi_p (len) && p1 && p2
8697 && compare_tree_int (len, strlen (p1) + 1) <= 0
8698 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8700 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8702 if (r > 0)
8703 return integer_one_node;
8704 else if (r < 0)
8705 return integer_minus_one_node;
8706 else
8707 return integer_zero_node;
8710 /* If the len parameter is one, return an expression corresponding to
8711 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8712 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8714 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8715 tree cst_uchar_ptr_node
8716 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8718 tree ind1
8719 = fold_convert_loc (loc, integer_type_node,
8720 build1 (INDIRECT_REF, cst_uchar_node,
8721 fold_convert_loc (loc,
8722 cst_uchar_ptr_node,
8723 arg1)));
8724 tree ind2
8725 = fold_convert_loc (loc, integer_type_node,
8726 build1 (INDIRECT_REF, cst_uchar_node,
8727 fold_convert_loc (loc,
8728 cst_uchar_ptr_node,
8729 arg2)));
8730 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8733 return NULL_TREE;
8736 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8737 Return NULL_TREE if no simplification can be made. */
8739 static tree
8740 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8742 const char *p1, *p2;
8744 if (!validate_arg (arg1, POINTER_TYPE)
8745 || !validate_arg (arg2, POINTER_TYPE))
8746 return NULL_TREE;
8748 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8749 if (operand_equal_p (arg1, arg2, 0))
8750 return integer_zero_node;
8752 p1 = c_getstr (arg1);
8753 p2 = c_getstr (arg2);
8755 if (p1 && p2)
8757 const int i = strcmp (p1, p2);
8758 if (i < 0)
8759 return integer_minus_one_node;
8760 else if (i > 0)
8761 return integer_one_node;
8762 else
8763 return integer_zero_node;
8766 /* If the second arg is "", return *(const unsigned char*)arg1. */
8767 if (p2 && *p2 == '\0')
8769 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8770 tree cst_uchar_ptr_node
8771 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8773 return fold_convert_loc (loc, integer_type_node,
8774 build1 (INDIRECT_REF, cst_uchar_node,
8775 fold_convert_loc (loc,
8776 cst_uchar_ptr_node,
8777 arg1)));
8780 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8781 if (p1 && *p1 == '\0')
8783 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8784 tree cst_uchar_ptr_node
8785 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8787 tree temp
8788 = fold_convert_loc (loc, integer_type_node,
8789 build1 (INDIRECT_REF, cst_uchar_node,
8790 fold_convert_loc (loc,
8791 cst_uchar_ptr_node,
8792 arg2)));
8793 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8796 return NULL_TREE;
8799 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8800 Return NULL_TREE if no simplification can be made. */
8802 static tree
8803 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8805 const char *p1, *p2;
8807 if (!validate_arg (arg1, POINTER_TYPE)
8808 || !validate_arg (arg2, POINTER_TYPE)
8809 || !validate_arg (len, INTEGER_TYPE))
8810 return NULL_TREE;
8812 /* If the LEN parameter is zero, return zero. */
8813 if (integer_zerop (len))
8814 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8815 arg1, arg2);
8817 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8818 if (operand_equal_p (arg1, arg2, 0))
8819 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8821 p1 = c_getstr (arg1);
8822 p2 = c_getstr (arg2);
8824 if (tree_fits_uhwi_p (len) && p1 && p2)
8826 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8827 if (i > 0)
8828 return integer_one_node;
8829 else if (i < 0)
8830 return integer_minus_one_node;
8831 else
8832 return integer_zero_node;
8835 /* If the second arg is "", and the length is greater than zero,
8836 return *(const unsigned char*)arg1. */
8837 if (p2 && *p2 == '\0'
8838 && TREE_CODE (len) == INTEGER_CST
8839 && tree_int_cst_sgn (len) == 1)
8841 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8842 tree cst_uchar_ptr_node
8843 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8845 return fold_convert_loc (loc, integer_type_node,
8846 build1 (INDIRECT_REF, cst_uchar_node,
8847 fold_convert_loc (loc,
8848 cst_uchar_ptr_node,
8849 arg1)));
8852 /* If the first arg is "", and the length is greater than zero,
8853 return -*(const unsigned char*)arg2. */
8854 if (p1 && *p1 == '\0'
8855 && TREE_CODE (len) == INTEGER_CST
8856 && tree_int_cst_sgn (len) == 1)
8858 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8859 tree cst_uchar_ptr_node
8860 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8862 tree temp = fold_convert_loc (loc, integer_type_node,
8863 build1 (INDIRECT_REF, cst_uchar_node,
8864 fold_convert_loc (loc,
8865 cst_uchar_ptr_node,
8866 arg2)));
8867 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8870 /* If the len parameter is one, return an expression corresponding to
8871 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8872 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8874 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8875 tree cst_uchar_ptr_node
8876 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8878 tree ind1 = fold_convert_loc (loc, integer_type_node,
8879 build1 (INDIRECT_REF, cst_uchar_node,
8880 fold_convert_loc (loc,
8881 cst_uchar_ptr_node,
8882 arg1)));
8883 tree ind2 = fold_convert_loc (loc, integer_type_node,
8884 build1 (INDIRECT_REF, cst_uchar_node,
8885 fold_convert_loc (loc,
8886 cst_uchar_ptr_node,
8887 arg2)));
8888 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8891 return NULL_TREE;
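
/* Illustrative sketch (editorial): besides whole-call constant
   folding, the length-one case above reduces

     strncmp (a, b, 1)

   to the character difference

     (int) *(const unsigned char *) a - (int) *(const unsigned char *) b

   matching strncmp's unsigned-char comparison semantics.  */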
8894 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8895 ARG. Return NULL_TREE if no simplification can be made. */
8897 static tree
8898 fold_builtin_signbit (location_t loc, tree arg, tree type)
8900 if (!validate_arg (arg, REAL_TYPE))
8901 return NULL_TREE;
8903 /* If ARG is a compile-time constant, determine the result. */
8904 if (TREE_CODE (arg) == REAL_CST
8905 && !TREE_OVERFLOW (arg))
8907 REAL_VALUE_TYPE c;
8909 c = TREE_REAL_CST (arg);
8910 return (REAL_VALUE_NEGATIVE (c)
8911 ? build_one_cst (type)
8912 : build_zero_cst (type));
8915 /* If ARG is non-negative, the result is always zero. */
8916 if (tree_expr_nonnegative_p (arg))
8917 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8919 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8920 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8921 return fold_convert (type,
8922 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8923 build_real (TREE_TYPE (arg), dconst0)));
8925 return NULL_TREE;
8928 /* Fold function call to builtin copysign, copysignf or copysignl with
8929 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8930 be made. */
8932 static tree
8933 fold_builtin_copysign (location_t loc, tree fndecl,
8934 tree arg1, tree arg2, tree type)
8936 tree tem;
8938 if (!validate_arg (arg1, REAL_TYPE)
8939 || !validate_arg (arg2, REAL_TYPE))
8940 return NULL_TREE;
8942 /* copysign(X,X) is X. */
8943 if (operand_equal_p (arg1, arg2, 0))
8944 return fold_convert_loc (loc, type, arg1);
8946 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8947 if (TREE_CODE (arg1) == REAL_CST
8948 && TREE_CODE (arg2) == REAL_CST
8949 && !TREE_OVERFLOW (arg1)
8950 && !TREE_OVERFLOW (arg2))
8952 REAL_VALUE_TYPE c1, c2;
8954 c1 = TREE_REAL_CST (arg1);
8955 c2 = TREE_REAL_CST (arg2);
8956 /* c1.sign := c2.sign. */
8957 real_copysign (&c1, &c2);
8958 return build_real (type, c1);
8961 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8962 Remember to evaluate Y for side-effects. */
8963 if (tree_expr_nonnegative_p (arg2))
8964 return omit_one_operand_loc (loc, type,
8965 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8966 arg2);
8968 /* Strip sign changing operations for the first argument. */
8969 tem = fold_strip_sign_ops (arg1);
8970 if (tem)
8971 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8973 return NULL_TREE;
8976 /* Fold a call to builtin isascii with argument ARG. */
8978 static tree
8979 fold_builtin_isascii (location_t loc, tree arg)
8981 if (!validate_arg (arg, INTEGER_TYPE))
8982 return NULL_TREE;
8983 else
8985 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8986 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8987 build_int_cst (integer_type_node,
8988 ~ (unsigned HOST_WIDE_INT) 0x7f));
8989 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8990 arg, integer_zero_node);
8994 /* Fold a call to builtin toascii with argument ARG. */
8996 static tree
8997 fold_builtin_toascii (location_t loc, tree arg)
8999 if (!validate_arg (arg, INTEGER_TYPE))
9000 return NULL_TREE;
9002 /* Transform toascii(c) -> (c & 0x7f). */
9003 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9004 build_int_cst (integer_type_node, 0x7f));
9007 /* Fold a call to builtin isdigit with argument ARG. */
9009 static tree
9010 fold_builtin_isdigit (location_t loc, tree arg)
9012 if (!validate_arg (arg, INTEGER_TYPE))
9013 return NULL_TREE;
9014 else
9016 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9017 /* According to the C standard, isdigit is unaffected by locale.
9018 However, it definitely is affected by the target character set. */
9019 unsigned HOST_WIDE_INT target_digit0
9020 = lang_hooks.to_target_charset ('0');
9022 if (target_digit0 == 0)
9023 return NULL_TREE;
9025 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9026 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9027 build_int_cst (unsigned_type_node, target_digit0));
9028 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9029 build_int_cst (unsigned_type_node, 9));
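
/* Illustrative note (editorial): the rewrite above is the classic
   branch-free range check

     isdigit (c)  ->  (unsigned) c - '0' <= 9

   which is valid because C guarantees the digits '0'..'9' are
   contiguous in the execution character set.  */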
9033 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9035 static tree
9036 fold_builtin_fabs (location_t loc, tree arg, tree type)
9038 if (!validate_arg (arg, REAL_TYPE))
9039 return NULL_TREE;
9041 arg = fold_convert_loc (loc, type, arg);
9042 if (TREE_CODE (arg) == REAL_CST)
9043 return fold_abs_const (arg, type);
9044 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9047 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9049 static tree
9050 fold_builtin_abs (location_t loc, tree arg, tree type)
9052 if (!validate_arg (arg, INTEGER_TYPE))
9053 return NULL_TREE;
9055 arg = fold_convert_loc (loc, type, arg);
9056 if (TREE_CODE (arg) == INTEGER_CST)
9057 return fold_abs_const (arg, type);
9058 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9061 /* Fold a fma operation with arguments ARG[012]. */
9063 tree
9064 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9065 tree type, tree arg0, tree arg1, tree arg2)
9067 if (TREE_CODE (arg0) == REAL_CST
9068 && TREE_CODE (arg1) == REAL_CST
9069 && TREE_CODE (arg2) == REAL_CST)
9070 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9072 return NULL_TREE;
9075 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9077 static tree
9078 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9080 if (validate_arg (arg0, REAL_TYPE)
9081 && validate_arg (arg1, REAL_TYPE)
9082 && validate_arg (arg2, REAL_TYPE))
9084 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9085 if (tem)
9086 return tem;
9088 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9089 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9090 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9092 return NULL_TREE;
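/* Editor's note (not in the original source): gating the FMA_EXPR fold on
   optab_handler presumably avoids rewriting a call to fma () into an
   FMA_EXPR that the expander would only turn back into a library call.  */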
9095 /* Fold a call to builtin fmin or fmax. */
9097 static tree
9098 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9099 tree type, bool max)
9101 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9103 /* Calculate the result when the argument is a constant. */
9104 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9106 if (res)
9107 return res;
9109 /* If either argument is NaN, return the other one. Avoid the
9110 transformation if we get (and honor) a signalling NaN. Using
9111 omit_one_operand() ensures we create a non-lvalue. */
9112 if (TREE_CODE (arg0) == REAL_CST
9113 && real_isnan (&TREE_REAL_CST (arg0))
9114 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9115 || ! TREE_REAL_CST (arg0).signalling))
9116 return omit_one_operand_loc (loc, type, arg1, arg0);
9117 if (TREE_CODE (arg1) == REAL_CST
9118 && real_isnan (&TREE_REAL_CST (arg1))
9119 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9120 || ! TREE_REAL_CST (arg1).signalling))
9121 return omit_one_operand_loc (loc, type, arg0, arg1);
9123 /* Transform fmin/fmax(x,x) -> x. */
9124 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9125 return omit_one_operand_loc (loc, type, arg0, arg1);
9127 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9128 functions to return the numeric arg if the other one is NaN.
9129 These tree codes don't honor that, so only transform if
9130 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9131 handled, so we don't have to worry about it either. */
9132 if (flag_finite_math_only)
9133 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9134 fold_convert_loc (loc, type, arg0),
9135 fold_convert_loc (loc, type, arg1));
9137 return NULL_TREE;
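/* Editor's note (not in the original source): the flag_finite_math_only
   guard above matters because C99 fmin (1.0, NAN) must return 1.0, while
   MIN_EXPR may be expanded as x < y ? x : y, which yields NAN for that
   input.  */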
9140 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9142 static tree
9143 fold_builtin_carg (location_t loc, tree arg, tree type)
9145 if (validate_arg (arg, COMPLEX_TYPE)
9146 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9148 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9150 if (atan2_fn)
9152 tree new_arg = builtin_save_expr (arg);
9153 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9154 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9155 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9159 return NULL_TREE;
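/* Editor's note (not in the original source): e.g. carg (-1.0 + 0.0i)
   folds to atan2 (0.0, -1.0), which is pi; builtin_save_expr ensures the
   complex argument is evaluated only once even though it is read twice,
   for its real and imaginary parts.  */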
9162 /* Fold a call to builtin logb/ilogb. */
9164 static tree
9165 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9167 if (! validate_arg (arg, REAL_TYPE))
9168 return NULL_TREE;
9170 STRIP_NOPS (arg);
9172 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9174 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9176 switch (value->cl)
9178 case rvc_nan:
9179 case rvc_inf:
9180 /* If arg is Inf or NaN and we're logb, return it. */
9181 if (TREE_CODE (rettype) == REAL_TYPE)
9183 /* For logb(-Inf) we have to return +Inf. */
9184 if (real_isinf (value) && real_isneg (value))
9186 REAL_VALUE_TYPE tem;
9187 real_inf (&tem);
9188 return build_real (rettype, tem);
9190 return fold_convert_loc (loc, rettype, arg);
9192 /* Fall through... */
9193 case rvc_zero:
9194 /* Zero may set errno and/or raise an exception for logb, and
9195 for ilogb we don't know the target's FP_ILOGB0. */
9196 return NULL_TREE;
9197 case rvc_normal:
9198 /* For normal numbers, proceed iff radix == 2. In GCC,
9199 normalized significands are in the range [0.5, 1.0). We
9200 want the exponent as if they were [1.0, 2.0) so get the
9201 exponent and subtract 1. */
9202 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9203 return fold_convert_loc (loc, rettype,
9204 build_int_cst (integer_type_node,
9205 REAL_EXP (value)-1));
9206 break;
9210 return NULL_TREE;
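/* Editor's note (not in the original source): as an example of the
   rvc_normal case above, GCC represents 8.0 as significand 0.5 with
   REAL_EXP == 4, so the fold returns 4 - 1 == 3, matching
   logb (8.0) == 3.0.  */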
9213 /* Fold a call to builtin significand, if radix == 2. */
9215 static tree
9216 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9218 if (! validate_arg (arg, REAL_TYPE))
9219 return NULL_TREE;
9221 STRIP_NOPS (arg);
9223 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9225 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9227 switch (value->cl)
9229 case rvc_zero:
9230 case rvc_nan:
9231 case rvc_inf:
9232 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9233 return fold_convert_loc (loc, rettype, arg);
9234 case rvc_normal:
9235 /* For normal numbers, proceed iff radix == 2. */
9236 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9238 REAL_VALUE_TYPE result = *value;
9239 /* In GCC, normalized significands are in the range [0.5,
9240 1.0). We want them to be [1.0, 2.0) so set the
9241 exponent to 1. */
9242 SET_REAL_EXP (&result, 1);
9243 return build_real (rettype, result);
9245 break;
9249 return NULL_TREE;
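/* Editor's note (not in the original source): continuing the example,
   significand (8.0) rescales the stored significand 0.5 by forcing the
   exponent to 1, yielding 0.5 * 2**1 == 1.0, which is indeed 8.0 with its
   binary exponent removed.  */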
9252 /* Fold a call to builtin frexp; we can assume the base is 2. */
9254 static tree
9255 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9257 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9258 return NULL_TREE;
9260 STRIP_NOPS (arg0);
9262 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9263 return NULL_TREE;
9265 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9267 /* Proceed if a valid pointer type was passed in. */
9268 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9270 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9271 tree frac, exp;
9273 switch (value->cl)
9275 case rvc_zero:
9276 /* For +-0, return (*exp = 0, +-0). */
9277 exp = integer_zero_node;
9278 frac = arg0;
9279 break;
9280 case rvc_nan:
9281 case rvc_inf:
9282 /* For +-NaN or +-Inf, *exp is unspecified; return arg0. */
9283 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9284 case rvc_normal:
9286 /* Since the frexp function always expects base 2, and in
9287 GCC normalized significands are already in the range
9288 [0.5, 1.0), we have exactly what frexp wants. */
9289 REAL_VALUE_TYPE frac_rvt = *value;
9290 SET_REAL_EXP (&frac_rvt, 0);
9291 frac = build_real (rettype, frac_rvt);
9292 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9294 break;
9295 default:
9296 gcc_unreachable ();
9299 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9300 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9301 TREE_SIDE_EFFECTS (arg1) = 1;
9302 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9305 return NULL_TREE;
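/* Editor's note (not in the original source): e.g. frexp (8.0, &e) folds
   to the pair (*&e = 4, 0.5), since GCC's stored decomposition 0.5 * 2**4
   already matches the [0.5, 1.0) x 2**exp form that frexp is specified to
   produce.  */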
9308 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9309 then we can assume the base is two. If it's false, then we have to
9310 check the mode of the TYPE parameter in certain cases. */
9312 static tree
9313 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9314 tree type, bool ldexp)
9316 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9318 STRIP_NOPS (arg0);
9319 STRIP_NOPS (arg1);
9321 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9322 if (real_zerop (arg0) || integer_zerop (arg1)
9323 || (TREE_CODE (arg0) == REAL_CST
9324 && !real_isfinite (&TREE_REAL_CST (arg0))))
9325 return omit_one_operand_loc (loc, type, arg0, arg1);
9327 /* If both arguments are constant, then try to evaluate it. */
9328 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9329 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9330 && tree_fits_shwi_p (arg1))
9332 /* Bound the maximum adjustment to twice the range of the
9333 mode's valid exponents. Use abs to ensure the range is
9334 positive as a sanity check. */
9335 const long max_exp_adj = 2 *
9336 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9337 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9339 /* Get the user-requested adjustment. */
9340 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9342 /* The requested adjustment must be inside this range. This
9343 is a preliminary cap to avoid things like overflow; we
9344 may still fail to compute the result for other reasons. */
9345 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9347 REAL_VALUE_TYPE initial_result;
9349 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9351 /* Ensure we didn't overflow. */
9352 if (! real_isinf (&initial_result))
9354 const REAL_VALUE_TYPE trunc_result
9355 = real_value_truncate (TYPE_MODE (type), initial_result);
9357 /* Only proceed if the target mode can hold the
9358 resulting value. */
9359 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9360 return build_real (type, trunc_result);
9366 return NULL_TREE;
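/* Editor's note (not in the original source): e.g. ldexp (0.75, 4) folds
   to the constant 12.0, while ldexp (x, 0) and ldexp (0.0, n) fold to
   their first argument through the omit_one_operand_loc path above.  */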
9369 /* Fold a call to builtin modf. */
9371 static tree
9372 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9374 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9375 return NULL_TREE;
9377 STRIP_NOPS (arg0);
9379 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9380 return NULL_TREE;
9382 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9384 /* Proceed if a valid pointer type was passed in. */
9385 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9387 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9388 REAL_VALUE_TYPE trunc, frac;
9390 switch (value->cl)
9392 case rvc_nan:
9393 case rvc_zero:
9394 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9395 trunc = frac = *value;
9396 break;
9397 case rvc_inf:
9398 /* For +-Inf, return (*arg1 = arg0, +-0). */
9399 frac = dconst0;
9400 frac.sign = value->sign;
9401 trunc = *value;
9402 break;
9403 case rvc_normal:
9404 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9405 real_trunc (&trunc, VOIDmode, value);
9406 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9407 /* If the original number was negative and already
9408 integral, then the fractional part is -0.0. */
9409 if (value->sign && frac.cl == rvc_zero)
9410 frac.sign = value->sign;
9411 break;
9414 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9415 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9416 build_real (rettype, trunc));
9417 TREE_SIDE_EFFECTS (arg1) = 1;
9418 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9419 build_real (rettype, frac));
9422 return NULL_TREE;
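/* Editor's note (not in the original source): e.g. modf (-3.5, &i) folds
   to (*&i = -3.0, -0.5), and modf (-2.0, &i) to (*&i = -2.0, -0.0); the
   rvc_normal case above deliberately copies the sign onto an exactly
   integral argument's zero fractional part.  */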
9425 /* Given a location LOC, an interclass builtin function decl FNDECL
9426 and its single argument ARG, return a folded expression computing
9427 the same, or NULL_TREE if we either couldn't or didn't want to fold
9428 (the latter happens if there's an RTL instruction available). */
9430 static tree
9431 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9433 machine_mode mode;
9435 if (!validate_arg (arg, REAL_TYPE))
9436 return NULL_TREE;
9438 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9439 return NULL_TREE;
9441 mode = TYPE_MODE (TREE_TYPE (arg));
9443 /* If there is no optab, try generic code. */
9444 switch (DECL_FUNCTION_CODE (fndecl))
9446 tree result;
9448 CASE_FLT_FN (BUILT_IN_ISINF):
9450 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9451 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9452 tree const type = TREE_TYPE (arg);
9453 REAL_VALUE_TYPE r;
9454 char buf[128];
9456 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9457 real_from_string (&r, buf);
9458 result = build_call_expr (isgr_fn, 2,
9459 fold_build1_loc (loc, ABS_EXPR, type, arg),
9460 build_real (type, r));
9461 return result;
9463 CASE_FLT_FN (BUILT_IN_FINITE):
9464 case BUILT_IN_ISFINITE:
9466 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9467 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9468 tree const type = TREE_TYPE (arg);
9469 REAL_VALUE_TYPE r;
9470 char buf[128];
9472 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9473 real_from_string (&r, buf);
9474 result = build_call_expr (isle_fn, 2,
9475 fold_build1_loc (loc, ABS_EXPR, type, arg),
9476 build_real (type, r));
9477 /*result = fold_build2_loc (loc, UNGT_EXPR,
9478 TREE_TYPE (TREE_TYPE (fndecl)),
9479 fold_build1_loc (loc, ABS_EXPR, type, arg),
9480 build_real (type, r));
9481 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9482 TREE_TYPE (TREE_TYPE (fndecl)),
9483 result);*/
9484 return result;
9486 case BUILT_IN_ISNORMAL:
9488 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9489 islessequal(fabs(x),DBL_MAX). */
9490 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9491 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9492 tree const type = TREE_TYPE (arg);
9493 REAL_VALUE_TYPE rmax, rmin;
9494 char buf[128];
9496 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9497 real_from_string (&rmax, buf);
9498 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9499 real_from_string (&rmin, buf);
9500 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9501 result = build_call_expr (isle_fn, 2, arg,
9502 build_real (type, rmax));
9503 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9504 build_call_expr (isge_fn, 2, arg,
9505 build_real (type, rmin)));
9506 return result;
9508 default:
9509 break;
9512 return NULL_TREE;
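/* Editor's note (not in the original source): for IEEE double,
   get_max_float fills BUF with a hex string denoting the largest finite
   value of the mode (DBL_MAX), so the ISINF case above folds isinf (x)
   into isgreater (fabs (x), DBL_MAX), a quiet comparison.  */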
9515 /* Fold a call to __builtin_isnan, __builtin_isinf, __builtin_isinf_sign
9516 or __builtin_isfinite. ARG is the argument for the call. */
9518 static tree
9519 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9521 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9522 REAL_VALUE_TYPE r;
9524 if (!validate_arg (arg, REAL_TYPE))
9525 return NULL_TREE;
9527 switch (builtin_index)
9529 case BUILT_IN_ISINF:
9530 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9531 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9533 if (TREE_CODE (arg) == REAL_CST)
9535 r = TREE_REAL_CST (arg);
9536 if (real_isinf (&r))
9537 return real_compare (GT_EXPR, &r, &dconst0)
9538 ? integer_one_node : integer_minus_one_node;
9539 else
9540 return integer_zero_node;
9543 return NULL_TREE;
9545 case BUILT_IN_ISINF_SIGN:
9547 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9548 /* In a boolean context, GCC will fold the inner COND_EXPR to
9549 1. So e.g. "if (isinf_sign(x))" would be folded to just
9550 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9551 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9552 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9553 tree tmp = NULL_TREE;
9555 arg = builtin_save_expr (arg);
9557 if (signbit_fn && isinf_fn)
9559 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9560 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9562 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9563 signbit_call, integer_zero_node);
9564 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9565 isinf_call, integer_zero_node);
9567 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9568 integer_minus_one_node, integer_one_node);
9569 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9570 isinf_call, tmp,
9571 integer_zero_node);
9574 return tmp;
9577 case BUILT_IN_ISFINITE:
9578 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9579 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9580 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9582 if (TREE_CODE (arg) == REAL_CST)
9584 r = TREE_REAL_CST (arg);
9585 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9588 return NULL_TREE;
9590 case BUILT_IN_ISNAN:
9591 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9592 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9594 if (TREE_CODE (arg) == REAL_CST)
9596 r = TREE_REAL_CST (arg);
9597 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9600 arg = builtin_save_expr (arg);
9601 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9603 default:
9604 gcc_unreachable ();
9608 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9609 This builtin will generate code to return the appropriate floating
9610 point classification depending on the value of the floating point
9611 number passed in. The possible return values must be supplied as
9612 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9613 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9614 one floating-point argument, which is "type generic". */
9616 static tree
9617 fold_builtin_fpclassify (location_t loc, tree exp)
9619 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9620 arg, type, res, tmp;
9621 machine_mode mode;
9622 REAL_VALUE_TYPE r;
9623 char buf[128];
9625 /* Verify the required arguments in the original call. */
9626 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9627 INTEGER_TYPE, INTEGER_TYPE,
9628 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9629 return NULL_TREE;
9631 fp_nan = CALL_EXPR_ARG (exp, 0);
9632 fp_infinite = CALL_EXPR_ARG (exp, 1);
9633 fp_normal = CALL_EXPR_ARG (exp, 2);
9634 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9635 fp_zero = CALL_EXPR_ARG (exp, 4);
9636 arg = CALL_EXPR_ARG (exp, 5);
9637 type = TREE_TYPE (arg);
9638 mode = TYPE_MODE (type);
9639 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9641 /* fpclassify(x) ->
9642 isnan(x) ? FP_NAN :
9643 (fabs(x) == Inf ? FP_INFINITE :
9644 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9645 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9647 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9648 build_real (type, dconst0));
9649 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9650 tmp, fp_zero, fp_subnormal);
9652 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9653 real_from_string (&r, buf);
9654 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9655 arg, build_real (type, r));
9656 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9658 if (HONOR_INFINITIES (mode))
9660 real_inf (&r);
9661 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9662 build_real (type, r));
9663 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9664 fp_infinite, res);
9667 if (HONOR_NANS (mode))
9669 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9670 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9673 return res;
9676 /* Fold a call to an unordered comparison function such as
9677 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9678 being called and ARG0 and ARG1 are the arguments for the call.
9679 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9680 the opposite of the desired result. UNORDERED_CODE is used
9681 for modes that can hold NaNs and ORDERED_CODE is used for
9682 the rest. */
9684 static tree
9685 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9686 enum tree_code unordered_code,
9687 enum tree_code ordered_code)
9689 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9690 enum tree_code code;
9691 tree type0, type1;
9692 enum tree_code code0, code1;
9693 tree cmp_type = NULL_TREE;
9695 type0 = TREE_TYPE (arg0);
9696 type1 = TREE_TYPE (arg1);
9698 code0 = TREE_CODE (type0);
9699 code1 = TREE_CODE (type1);
9701 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9702 /* Choose the wider of two real types. */
9703 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9704 ? type0 : type1;
9705 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9706 cmp_type = type0;
9707 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9708 cmp_type = type1;
9710 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9711 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9713 if (unordered_code == UNORDERED_EXPR)
9715 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9716 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9717 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9720 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9721 : ordered_code;
9722 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9723 fold_build2_loc (loc, code, type, arg0, arg1));
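/* Editor's note (not in the original source): e.g. isgreater (x, y) is
   passed UNLE_EXPR/LE_EXPR by its caller, so when NaNs are honored it
   folds to !(x unle y), which is the required quiet x > y and evaluates
   to 0 whenever either operand is a NaN.  */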
9726 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9727 arithmetic if it can never overflow, or into internal functions that
9728 return both the result of the arithmetic and an overflow flag in
9729 a complex integer result, or into some other check for overflow. */
9731 static tree
9732 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9733 tree arg0, tree arg1, tree arg2)
9735 enum internal_fn ifn = IFN_LAST;
9736 tree type = TREE_TYPE (TREE_TYPE (arg2));
9737 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9738 switch (fcode)
9740 case BUILT_IN_ADD_OVERFLOW:
9741 case BUILT_IN_SADD_OVERFLOW:
9742 case BUILT_IN_SADDL_OVERFLOW:
9743 case BUILT_IN_SADDLL_OVERFLOW:
9744 case BUILT_IN_UADD_OVERFLOW:
9745 case BUILT_IN_UADDL_OVERFLOW:
9746 case BUILT_IN_UADDLL_OVERFLOW:
9747 ifn = IFN_ADD_OVERFLOW;
9748 break;
9749 case BUILT_IN_SUB_OVERFLOW:
9750 case BUILT_IN_SSUB_OVERFLOW:
9751 case BUILT_IN_SSUBL_OVERFLOW:
9752 case BUILT_IN_SSUBLL_OVERFLOW:
9753 case BUILT_IN_USUB_OVERFLOW:
9754 case BUILT_IN_USUBL_OVERFLOW:
9755 case BUILT_IN_USUBLL_OVERFLOW:
9756 ifn = IFN_SUB_OVERFLOW;
9757 break;
9758 case BUILT_IN_MUL_OVERFLOW:
9759 case BUILT_IN_SMUL_OVERFLOW:
9760 case BUILT_IN_SMULL_OVERFLOW:
9761 case BUILT_IN_SMULLL_OVERFLOW:
9762 case BUILT_IN_UMUL_OVERFLOW:
9763 case BUILT_IN_UMULL_OVERFLOW:
9764 case BUILT_IN_UMULLL_OVERFLOW:
9765 ifn = IFN_MUL_OVERFLOW;
9766 break;
9767 default:
9768 gcc_unreachable ();
9770 tree ctype = build_complex_type (type);
9771 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9772 2, arg0, arg1);
9773 tree tgt = save_expr (call);
9774 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9775 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9776 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9777 tree store
9778 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9779 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
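/* Editor's note (not in the original source): the folding above turns,
   e.g., __builtin_add_overflow (a, b, &r) into roughly

     tmp = IFN_ADD_OVERFLOW (a, b);    complex int: real = sum, imag = flag
     *&r = REALPART_EXPR <tmp>,
     (_Bool) IMAGPART_EXPR <tmp>

   a COMPOUND_EXPR whose value is the overflow flag.  */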
9782 /* Fold a call to built-in function FNDECL with 0 arguments.
9783 IGNORE is true if the result of the function call is ignored. This
9784 function returns NULL_TREE if no simplification was possible. */
9786 static tree
9787 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9789 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9790 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9791 switch (fcode)
9793 CASE_FLT_FN (BUILT_IN_INF):
9794 case BUILT_IN_INFD32:
9795 case BUILT_IN_INFD64:
9796 case BUILT_IN_INFD128:
9797 return fold_builtin_inf (loc, type, true);
9799 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9800 return fold_builtin_inf (loc, type, false);
9802 case BUILT_IN_CLASSIFY_TYPE:
9803 return fold_builtin_classify_type (NULL_TREE);
9805 default:
9806 break;
9808 return NULL_TREE;
9811 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9812 IGNORE is true if the result of the function call is ignored. This
9813 function returns NULL_TREE if no simplification was possible. */
9815 static tree
9816 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9818 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9819 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9820 switch (fcode)
9822 case BUILT_IN_CONSTANT_P:
9824 tree val = fold_builtin_constant_p (arg0);
9826 /* Gimplification will pull the CALL_EXPR for the builtin out of
9827 an if condition. When not optimizing, we'll not CSE it back.
9828 To avoid link-error regressions, return false now. */
9829 if (!val && !optimize)
9830 val = integer_zero_node;
9832 return val;
9835 case BUILT_IN_CLASSIFY_TYPE:
9836 return fold_builtin_classify_type (arg0);
9838 case BUILT_IN_STRLEN:
9839 return fold_builtin_strlen (loc, type, arg0);
9841 CASE_FLT_FN (BUILT_IN_FABS):
9842 case BUILT_IN_FABSD32:
9843 case BUILT_IN_FABSD64:
9844 case BUILT_IN_FABSD128:
9845 return fold_builtin_fabs (loc, arg0, type);
9847 case BUILT_IN_ABS:
9848 case BUILT_IN_LABS:
9849 case BUILT_IN_LLABS:
9850 case BUILT_IN_IMAXABS:
9851 return fold_builtin_abs (loc, arg0, type);
9853 CASE_FLT_FN (BUILT_IN_CONJ):
9854 if (validate_arg (arg0, COMPLEX_TYPE)
9855 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9856 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9857 break;
9859 CASE_FLT_FN (BUILT_IN_CREAL):
9860 if (validate_arg (arg0, COMPLEX_TYPE)
9861 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9862 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9863 break;
9865 CASE_FLT_FN (BUILT_IN_CIMAG):
9866 if (validate_arg (arg0, COMPLEX_TYPE)
9867 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9868 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9869 break;
9871 CASE_FLT_FN (BUILT_IN_CCOS):
9872 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9874 CASE_FLT_FN (BUILT_IN_CCOSH):
9875 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9877 CASE_FLT_FN (BUILT_IN_CPROJ):
9878 return fold_builtin_cproj (loc, arg0, type);
9880 CASE_FLT_FN (BUILT_IN_CSIN):
9881 if (validate_arg (arg0, COMPLEX_TYPE)
9882 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9883 return do_mpc_arg1 (arg0, type, mpc_sin);
9884 break;
9886 CASE_FLT_FN (BUILT_IN_CSINH):
9887 if (validate_arg (arg0, COMPLEX_TYPE)
9888 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9889 return do_mpc_arg1 (arg0, type, mpc_sinh);
9890 break;
9892 CASE_FLT_FN (BUILT_IN_CTAN):
9893 if (validate_arg (arg0, COMPLEX_TYPE)
9894 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9895 return do_mpc_arg1 (arg0, type, mpc_tan);
9896 break;
9898 CASE_FLT_FN (BUILT_IN_CTANH):
9899 if (validate_arg (arg0, COMPLEX_TYPE)
9900 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9901 return do_mpc_arg1 (arg0, type, mpc_tanh);
9902 break;
9904 CASE_FLT_FN (BUILT_IN_CLOG):
9905 if (validate_arg (arg0, COMPLEX_TYPE)
9906 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9907 return do_mpc_arg1 (arg0, type, mpc_log);
9908 break;
9910 CASE_FLT_FN (BUILT_IN_CSQRT):
9911 if (validate_arg (arg0, COMPLEX_TYPE)
9912 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9913 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9914 break;
9916 CASE_FLT_FN (BUILT_IN_CASIN):
9917 if (validate_arg (arg0, COMPLEX_TYPE)
9918 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9919 return do_mpc_arg1 (arg0, type, mpc_asin);
9920 break;
9922 CASE_FLT_FN (BUILT_IN_CACOS):
9923 if (validate_arg (arg0, COMPLEX_TYPE)
9924 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9925 return do_mpc_arg1 (arg0, type, mpc_acos);
9926 break;
9928 CASE_FLT_FN (BUILT_IN_CATAN):
9929 if (validate_arg (arg0, COMPLEX_TYPE)
9930 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9931 return do_mpc_arg1 (arg0, type, mpc_atan);
9932 break;
9934 CASE_FLT_FN (BUILT_IN_CASINH):
9935 if (validate_arg (arg0, COMPLEX_TYPE)
9936 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9937 return do_mpc_arg1 (arg0, type, mpc_asinh);
9938 break;
9940 CASE_FLT_FN (BUILT_IN_CACOSH):
9941 if (validate_arg (arg0, COMPLEX_TYPE)
9942 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9943 return do_mpc_arg1 (arg0, type, mpc_acosh);
9944 break;
9946 CASE_FLT_FN (BUILT_IN_CATANH):
9947 if (validate_arg (arg0, COMPLEX_TYPE)
9948 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9949 return do_mpc_arg1 (arg0, type, mpc_atanh);
9950 break;
9952 CASE_FLT_FN (BUILT_IN_CABS):
9953 return fold_builtin_cabs (loc, arg0, type, fndecl);
9955 CASE_FLT_FN (BUILT_IN_CARG):
9956 return fold_builtin_carg (loc, arg0, type);
9958 CASE_FLT_FN (BUILT_IN_SQRT):
9959 return fold_builtin_sqrt (loc, arg0, type);
9961 CASE_FLT_FN (BUILT_IN_CBRT):
9962 return fold_builtin_cbrt (loc, arg0, type);
9964 CASE_FLT_FN (BUILT_IN_ASIN):
9965 if (validate_arg (arg0, REAL_TYPE))
9966 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9967 &dconstm1, &dconst1, true);
9968 break;
9970 CASE_FLT_FN (BUILT_IN_ACOS):
9971 if (validate_arg (arg0, REAL_TYPE))
9972 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9973 &dconstm1, &dconst1, true);
9974 break;
9976 CASE_FLT_FN (BUILT_IN_ATAN):
9977 if (validate_arg (arg0, REAL_TYPE))
9978 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9979 break;
9981 CASE_FLT_FN (BUILT_IN_ASINH):
9982 if (validate_arg (arg0, REAL_TYPE))
9983 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9984 break;
9986 CASE_FLT_FN (BUILT_IN_ACOSH):
9987 if (validate_arg (arg0, REAL_TYPE))
9988 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9989 &dconst1, NULL, true);
9990 break;
9992 CASE_FLT_FN (BUILT_IN_ATANH):
9993 if (validate_arg (arg0, REAL_TYPE))
9994 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9995 &dconstm1, &dconst1, false);
9996 break;
9998 CASE_FLT_FN (BUILT_IN_SIN):
9999 if (validate_arg (arg0, REAL_TYPE))
10000 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10001 break;
10003 CASE_FLT_FN (BUILT_IN_COS):
10004 return fold_builtin_cos (loc, arg0, type, fndecl);
10006 CASE_FLT_FN (BUILT_IN_TAN):
10007 return fold_builtin_tan (arg0, type);
10009 CASE_FLT_FN (BUILT_IN_CEXP):
10010 return fold_builtin_cexp (loc, arg0, type);
10012 CASE_FLT_FN (BUILT_IN_CEXPI):
10013 if (validate_arg (arg0, REAL_TYPE))
10014 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10015 break;
10017 CASE_FLT_FN (BUILT_IN_SINH):
10018 if (validate_arg (arg0, REAL_TYPE))
10019 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10020 break;
10022 CASE_FLT_FN (BUILT_IN_COSH):
10023 return fold_builtin_cosh (loc, arg0, type, fndecl);
10025 CASE_FLT_FN (BUILT_IN_TANH):
10026 if (validate_arg (arg0, REAL_TYPE))
10027 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10028 break;
10030 CASE_FLT_FN (BUILT_IN_ERF):
10031 if (validate_arg (arg0, REAL_TYPE))
10032 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10033 break;
10035 CASE_FLT_FN (BUILT_IN_ERFC):
10036 if (validate_arg (arg0, REAL_TYPE))
10037 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10038 break;
10040 CASE_FLT_FN (BUILT_IN_TGAMMA):
10041 if (validate_arg (arg0, REAL_TYPE))
10042 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10043 break;
10045 CASE_FLT_FN (BUILT_IN_EXP):
10046 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10048 CASE_FLT_FN (BUILT_IN_EXP2):
10049 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10051 CASE_FLT_FN (BUILT_IN_EXP10):
10052 CASE_FLT_FN (BUILT_IN_POW10):
10053 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10055 CASE_FLT_FN (BUILT_IN_EXPM1):
10056 if (validate_arg (arg0, REAL_TYPE))
10057 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10058 break;
10060 CASE_FLT_FN (BUILT_IN_LOG):
10061 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10063 CASE_FLT_FN (BUILT_IN_LOG2):
10064 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10066 CASE_FLT_FN (BUILT_IN_LOG10):
10067 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10069 CASE_FLT_FN (BUILT_IN_LOG1P):
10070 if (validate_arg (arg0, REAL_TYPE))
10071 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10072 &dconstm1, NULL, false);
10073 break;
10075 CASE_FLT_FN (BUILT_IN_J0):
10076 if (validate_arg (arg0, REAL_TYPE))
10077 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10078 NULL, NULL, 0);
10079 break;
10081 CASE_FLT_FN (BUILT_IN_J1):
10082 if (validate_arg (arg0, REAL_TYPE))
10083 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10084 NULL, NULL, 0);
10085 break;
10087 CASE_FLT_FN (BUILT_IN_Y0):
10088 if (validate_arg (arg0, REAL_TYPE))
10089 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10090 &dconst0, NULL, false);
10091 break;
10093 CASE_FLT_FN (BUILT_IN_Y1):
10094 if (validate_arg (arg0, REAL_TYPE))
10095 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10096 &dconst0, NULL, false);
10097 break;
10099 CASE_FLT_FN (BUILT_IN_NAN):
10100 case BUILT_IN_NAND32:
10101 case BUILT_IN_NAND64:
10102 case BUILT_IN_NAND128:
10103 return fold_builtin_nan (arg0, type, true);
10105 CASE_FLT_FN (BUILT_IN_NANS):
10106 return fold_builtin_nan (arg0, type, false);
10108 CASE_FLT_FN (BUILT_IN_FLOOR):
10109 return fold_builtin_floor (loc, fndecl, arg0);
10111 CASE_FLT_FN (BUILT_IN_CEIL):
10112 return fold_builtin_ceil (loc, fndecl, arg0);
10114 CASE_FLT_FN (BUILT_IN_TRUNC):
10115 return fold_builtin_trunc (loc, fndecl, arg0);
10117 CASE_FLT_FN (BUILT_IN_ROUND):
10118 return fold_builtin_round (loc, fndecl, arg0);
10120 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10121 CASE_FLT_FN (BUILT_IN_RINT):
10122 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10124 CASE_FLT_FN (BUILT_IN_ICEIL):
10125 CASE_FLT_FN (BUILT_IN_LCEIL):
10126 CASE_FLT_FN (BUILT_IN_LLCEIL):
10127 CASE_FLT_FN (BUILT_IN_LFLOOR):
10128 CASE_FLT_FN (BUILT_IN_IFLOOR):
10129 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10130 CASE_FLT_FN (BUILT_IN_IROUND):
10131 CASE_FLT_FN (BUILT_IN_LROUND):
10132 CASE_FLT_FN (BUILT_IN_LLROUND):
10133 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10135 CASE_FLT_FN (BUILT_IN_IRINT):
10136 CASE_FLT_FN (BUILT_IN_LRINT):
10137 CASE_FLT_FN (BUILT_IN_LLRINT):
10138 return fold_fixed_mathfn (loc, fndecl, arg0);
10140 case BUILT_IN_BSWAP16:
10141 case BUILT_IN_BSWAP32:
10142 case BUILT_IN_BSWAP64:
10143 return fold_builtin_bswap (fndecl, arg0);
10145 CASE_INT_FN (BUILT_IN_FFS):
10146 CASE_INT_FN (BUILT_IN_CLZ):
10147 CASE_INT_FN (BUILT_IN_CTZ):
10148 CASE_INT_FN (BUILT_IN_CLRSB):
10149 CASE_INT_FN (BUILT_IN_POPCOUNT):
10150 CASE_INT_FN (BUILT_IN_PARITY):
10151 return fold_builtin_bitop (fndecl, arg0);
10153 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10154 return fold_builtin_signbit (loc, arg0, type);
10156 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10157 return fold_builtin_significand (loc, arg0, type);
10159 CASE_FLT_FN (BUILT_IN_ILOGB):
10160 CASE_FLT_FN (BUILT_IN_LOGB):
10161 return fold_builtin_logb (loc, arg0, type);
10163 case BUILT_IN_ISASCII:
10164 return fold_builtin_isascii (loc, arg0);
10166 case BUILT_IN_TOASCII:
10167 return fold_builtin_toascii (loc, arg0);
10169 case BUILT_IN_ISDIGIT:
10170 return fold_builtin_isdigit (loc, arg0);
10172 CASE_FLT_FN (BUILT_IN_FINITE):
10173 case BUILT_IN_FINITED32:
10174 case BUILT_IN_FINITED64:
10175 case BUILT_IN_FINITED128:
10176 case BUILT_IN_ISFINITE:
10178 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10179 if (ret)
10180 return ret;
10181 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10184 CASE_FLT_FN (BUILT_IN_ISINF):
10185 case BUILT_IN_ISINFD32:
10186 case BUILT_IN_ISINFD64:
10187 case BUILT_IN_ISINFD128:
10189 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10190 if (ret)
10191 return ret;
10192 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10195 case BUILT_IN_ISNORMAL:
10196 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10198 case BUILT_IN_ISINF_SIGN:
10199 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10201 CASE_FLT_FN (BUILT_IN_ISNAN):
10202 case BUILT_IN_ISNAND32:
10203 case BUILT_IN_ISNAND64:
10204 case BUILT_IN_ISNAND128:
10205 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10207 case BUILT_IN_PRINTF:
10208 case BUILT_IN_PRINTF_UNLOCKED:
10209 case BUILT_IN_VPRINTF:
10210 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10212 case BUILT_IN_FREE:
10213 if (integer_zerop (arg0))
10214 return build_empty_stmt (loc);
10215 break;
10217 default:
10218 break;
10221 return NULL_TREE;
10225 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10226 IGNORE is true if the result of the function call is ignored. This
10227 function returns NULL_TREE if no simplification was possible. */
10229 static tree
10230 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10232 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10233 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10235 switch (fcode)
10237 CASE_FLT_FN (BUILT_IN_JN):
10238 if (validate_arg (arg0, INTEGER_TYPE)
10239 && validate_arg (arg1, REAL_TYPE))
10240 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10241 break;
10243 CASE_FLT_FN (BUILT_IN_YN):
10244 if (validate_arg (arg0, INTEGER_TYPE)
10245 && validate_arg (arg1, REAL_TYPE))
10246 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10247 &dconst0, false);
10248 break;
10250 CASE_FLT_FN (BUILT_IN_DREM):
10251 CASE_FLT_FN (BUILT_IN_REMAINDER):
10252 if (validate_arg (arg0, REAL_TYPE)
10253 && validate_arg (arg1, REAL_TYPE))
10254 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10255 break;
10257 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10258 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10259 if (validate_arg (arg0, REAL_TYPE)
10260 && validate_arg (arg1, POINTER_TYPE))
10261 return do_mpfr_lgamma_r (arg0, arg1, type);
10262 break;
10264 CASE_FLT_FN (BUILT_IN_ATAN2):
10265 if (validate_arg (arg0, REAL_TYPE)
10266 && validate_arg (arg1, REAL_TYPE))
10267 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10268 break;
10270 CASE_FLT_FN (BUILT_IN_FDIM):
10271 if (validate_arg (arg0, REAL_TYPE)
10272 && validate_arg (arg1, REAL_TYPE))
10273 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10274 break;
10276 CASE_FLT_FN (BUILT_IN_HYPOT):
10277 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10279 CASE_FLT_FN (BUILT_IN_CPOW):
10280 if (validate_arg (arg0, COMPLEX_TYPE)
10281 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10282 && validate_arg (arg1, COMPLEX_TYPE)
10283 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10284 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10285 break;
10287 CASE_FLT_FN (BUILT_IN_LDEXP):
10288 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10289 CASE_FLT_FN (BUILT_IN_SCALBN):
10290 CASE_FLT_FN (BUILT_IN_SCALBLN):
10291 return fold_builtin_load_exponent (loc, arg0, arg1,
10292 type, /*ldexp=*/false);
10294 CASE_FLT_FN (BUILT_IN_FREXP):
10295 return fold_builtin_frexp (loc, arg0, arg1, type);
10297 CASE_FLT_FN (BUILT_IN_MODF):
10298 return fold_builtin_modf (loc, arg0, arg1, type);
10300 case BUILT_IN_STRSTR:
10301 return fold_builtin_strstr (loc, arg0, arg1, type);
10303 case BUILT_IN_STRSPN:
10304 return fold_builtin_strspn (loc, arg0, arg1);
10306 case BUILT_IN_STRCSPN:
10307 return fold_builtin_strcspn (loc, arg0, arg1);
10309 case BUILT_IN_STRCHR:
10310 case BUILT_IN_INDEX:
10311 return fold_builtin_strchr (loc, arg0, arg1, type);
10313 case BUILT_IN_STRRCHR:
10314 case BUILT_IN_RINDEX:
10315 return fold_builtin_strrchr (loc, arg0, arg1, type);
10317 case BUILT_IN_STPCPY:
10318 if (ignore)
10320 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10321 if (!fn)
10322 break;
10324 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10326 else
10327 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10328 break;
10330 case BUILT_IN_STRCMP:
10331 return fold_builtin_strcmp (loc, arg0, arg1);
10333 case BUILT_IN_STRPBRK:
10334 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10336 case BUILT_IN_EXPECT:
10337 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10339 CASE_FLT_FN (BUILT_IN_POW):
10340 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10342 CASE_FLT_FN (BUILT_IN_POWI):
10343 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10345 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10346 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10348 CASE_FLT_FN (BUILT_IN_FMIN):
10349 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10351 CASE_FLT_FN (BUILT_IN_FMAX):
10352 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10354 case BUILT_IN_ISGREATER:
10355 return fold_builtin_unordered_cmp (loc, fndecl,
10356 arg0, arg1, UNLE_EXPR, LE_EXPR);
10357 case BUILT_IN_ISGREATEREQUAL:
10358 return fold_builtin_unordered_cmp (loc, fndecl,
10359 arg0, arg1, UNLT_EXPR, LT_EXPR);
10360 case BUILT_IN_ISLESS:
10361 return fold_builtin_unordered_cmp (loc, fndecl,
10362 arg0, arg1, UNGE_EXPR, GE_EXPR);
10363 case BUILT_IN_ISLESSEQUAL:
10364 return fold_builtin_unordered_cmp (loc, fndecl,
10365 arg0, arg1, UNGT_EXPR, GT_EXPR);
10366 case BUILT_IN_ISLESSGREATER:
10367 return fold_builtin_unordered_cmp (loc, fndecl,
10368 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10369 case BUILT_IN_ISUNORDERED:
10370 return fold_builtin_unordered_cmp (loc, fndecl,
10371 arg0, arg1, UNORDERED_EXPR,
10372 NOP_EXPR);
10374 /* We do the folding for va_start in the expander. */
10375 case BUILT_IN_VA_START:
10376 break;
10378 case BUILT_IN_OBJECT_SIZE:
10379 return fold_builtin_object_size (arg0, arg1);
10381 case BUILT_IN_PRINTF:
10382 case BUILT_IN_PRINTF_UNLOCKED:
10383 case BUILT_IN_VPRINTF:
10384 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10386 case BUILT_IN_PRINTF_CHK:
10387 case BUILT_IN_VPRINTF_CHK:
10388 if (!validate_arg (arg0, INTEGER_TYPE)
10389 || TREE_SIDE_EFFECTS (arg0))
10390 return NULL_TREE;
10391 else
10392 return fold_builtin_printf (loc, fndecl,
10393 arg1, NULL_TREE, ignore, fcode);
10394 break;
10396 case BUILT_IN_FPRINTF:
10397 case BUILT_IN_FPRINTF_UNLOCKED:
10398 case BUILT_IN_VFPRINTF:
10399 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10400 ignore, fcode);
10402 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10403 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10405 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10406 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10408 default:
10409 break;
10411 return NULL_TREE;
10414 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10415 and ARG2. IGNORE is true if the result of the function call is ignored.
10416 This function returns NULL_TREE if no simplification was possible. */
10418 static tree
10419 fold_builtin_3 (location_t loc, tree fndecl,
10420 tree arg0, tree arg1, tree arg2, bool ignore)
10422 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10423 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10424 switch (fcode)
10427 CASE_FLT_FN (BUILT_IN_SINCOS):
10428 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10430 CASE_FLT_FN (BUILT_IN_FMA):
10431 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10434 CASE_FLT_FN (BUILT_IN_REMQUO):
10435 if (validate_arg (arg0, REAL_TYPE)
10436 && validate_arg (arg1, REAL_TYPE)
10437 && validate_arg (arg2, POINTER_TYPE))
10438 return do_mpfr_remquo (arg0, arg1, arg2);
10439 break;
10441 case BUILT_IN_STRNCAT:
10442 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10444 case BUILT_IN_STRNCMP:
10445 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10447 case BUILT_IN_MEMCHR:
10448 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10450 case BUILT_IN_BCMP:
10451 case BUILT_IN_MEMCMP:
10452 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10454 case BUILT_IN_PRINTF_CHK:
10455 case BUILT_IN_VPRINTF_CHK:
10456 if (!validate_arg (arg0, INTEGER_TYPE)
10457 || TREE_SIDE_EFFECTS (arg0))
10458 return NULL_TREE;
10459 else
10460 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10461 break;
10463 case BUILT_IN_FPRINTF:
10464 case BUILT_IN_FPRINTF_UNLOCKED:
10465 case BUILT_IN_VFPRINTF:
10466 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10467 ignore, fcode);
10469 case BUILT_IN_FPRINTF_CHK:
10470 case BUILT_IN_VFPRINTF_CHK:
10471 if (!validate_arg (arg1, INTEGER_TYPE)
10472 || TREE_SIDE_EFFECTS (arg1))
10473 return NULL_TREE;
10474 else
10475 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10476 ignore, fcode);
10478 case BUILT_IN_EXPECT:
10479 return fold_builtin_expect (loc, arg0, arg1, arg2);
10481 case BUILT_IN_ADD_OVERFLOW:
10482 case BUILT_IN_SUB_OVERFLOW:
10483 case BUILT_IN_MUL_OVERFLOW:
10484 case BUILT_IN_SADD_OVERFLOW:
10485 case BUILT_IN_SADDL_OVERFLOW:
10486 case BUILT_IN_SADDLL_OVERFLOW:
10487 case BUILT_IN_SSUB_OVERFLOW:
10488 case BUILT_IN_SSUBL_OVERFLOW:
10489 case BUILT_IN_SSUBLL_OVERFLOW:
10490 case BUILT_IN_SMUL_OVERFLOW:
10491 case BUILT_IN_SMULL_OVERFLOW:
10492 case BUILT_IN_SMULLL_OVERFLOW:
10493 case BUILT_IN_UADD_OVERFLOW:
10494 case BUILT_IN_UADDL_OVERFLOW:
10495 case BUILT_IN_UADDLL_OVERFLOW:
10496 case BUILT_IN_USUB_OVERFLOW:
10497 case BUILT_IN_USUBL_OVERFLOW:
10498 case BUILT_IN_USUBLL_OVERFLOW:
10499 case BUILT_IN_UMUL_OVERFLOW:
10500 case BUILT_IN_UMULL_OVERFLOW:
10501 case BUILT_IN_UMULLL_OVERFLOW:
10502 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10504 default:
10505 break;
10507 return NULL_TREE;
10510 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10511 ARG2, and ARG3. IGNORE is true if the result of the function call is
10512 ignored. This function returns NULL_TREE if no simplification was
10513 possible. */
10515 static tree
10516 fold_builtin_4 (location_t loc, tree fndecl,
10517 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10519 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10521 switch (fcode)
10523 case BUILT_IN_FPRINTF_CHK:
10524 case BUILT_IN_VFPRINTF_CHK:
10525 if (!validate_arg (arg1, INTEGER_TYPE)
10526 || TREE_SIDE_EFFECTS (arg1))
10527 return NULL_TREE;
10528 else
10529 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10530 ignore, fcode);
10531 break;
10533 default:
10534 break;
10536 return NULL_TREE;
10539 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10540 arguments, where NARGS <= 4. IGNORE is true if the result of the
10541 function call is ignored. This function returns NULL_TREE if no
10542 simplification was possible. Note that this only folds builtins with
10543 fixed argument patterns. Foldings that do varargs-to-varargs
10544 transformations, or that match calls with more than 4 arguments,
10545 need to be handled with fold_builtin_varargs instead. */
10547 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10549 tree
10550 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10552 tree ret = NULL_TREE;
10554 switch (nargs)
10556 case 0:
10557 ret = fold_builtin_0 (loc, fndecl, ignore);
10558 break;
10559 case 1:
10560 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10561 break;
10562 case 2:
10563 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10564 break;
10565 case 3:
10566 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10567 break;
10568 case 4:
10569 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10570 ignore);
10571 break;
10572 default:
10573 break;
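/* Editor's note (not in the original source): the NOP_EXPR below gives a
   fresh node on which to set the location and TREE_NO_WARNING, so the
   flags do not land on a folded tree that may be shared elsewhere.  */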
10575 if (ret)
10577 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10578 SET_EXPR_LOCATION (ret, loc);
10579 TREE_NO_WARNING (ret) = 1;
10580 return ret;
10582 return NULL_TREE;
10585 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10586 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10587 of arguments in ARGS to be omitted. OLDNARGS is the number of
10588 elements in ARGS. */
10590 static tree
10591 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10592 int skip, tree fndecl, int n, va_list newargs)
10594 int nargs = oldnargs - skip + n;
10595 tree *buffer;
10597 if (n > 0)
10599 int i, j;
10601 buffer = XALLOCAVEC (tree, nargs);
10602 for (i = 0; i < n; i++)
10603 buffer[i] = va_arg (newargs, tree);
10604 for (j = skip; j < oldnargs; j++, i++)
10605 buffer[i] = args[j];
10607 else
10608 buffer = args + skip;
10610 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10613 /* Return true if FNDECL shouldn't be folded right now.
10614 If a built-in function has an inline attribute always_inline
10615 wrapper, defer folding it after always_inline functions have
10616 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10617 might not be performed. */
10619 bool
10620 avoid_folding_inline_builtin (tree fndecl)
10622 return (DECL_DECLARED_INLINE_P (fndecl)
10623 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10624 && cfun
10625 && !cfun->always_inline_functions_inlined
10626 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10629 /* A wrapper function for builtin folding that prevents warnings for
10630 "statement without effect" and the like, caused by removing the
10631 call node earlier than the warning is generated. */
10633 tree
10634 fold_call_expr (location_t loc, tree exp, bool ignore)
10636 tree ret = NULL_TREE;
10637 tree fndecl = get_callee_fndecl (exp);
10638 if (fndecl
10639 && TREE_CODE (fndecl) == FUNCTION_DECL
10640 && DECL_BUILT_IN (fndecl)
10641 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10642 yet. Defer folding until we see all the arguments
10643 (after inlining). */
10644 && !CALL_EXPR_VA_ARG_PACK (exp))
10646 int nargs = call_expr_nargs (exp);
10648 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10649 instead the last argument is __builtin_va_arg_pack (). Defer folding
10650 even in that case, until the arguments are finalized. */
10651 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10653 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10654 if (fndecl2
10655 && TREE_CODE (fndecl2) == FUNCTION_DECL
10656 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10657 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10658 return NULL_TREE;
10661 if (avoid_folding_inline_builtin (fndecl))
10662 return NULL_TREE;
10664 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10665 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10666 CALL_EXPR_ARGP (exp), ignore);
10667 else
10669 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10671 tree *args = CALL_EXPR_ARGP (exp);
10672 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10674 if (!ret)
10675 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10676 if (ret)
10677 return ret;
10680 return NULL_TREE;
10683 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10684 N arguments are passed in the array ARGARRAY. */
10686 tree
10687 fold_builtin_call_array (location_t loc, tree type,
10688 tree fn,
10689 int n,
10690 tree *argarray)
10692 tree ret = NULL_TREE;
10693 tree exp;
10695 if (TREE_CODE (fn) == ADDR_EXPR)
10697 tree fndecl = TREE_OPERAND (fn, 0);
10698 if (TREE_CODE (fndecl) == FUNCTION_DECL
10699 && DECL_BUILT_IN (fndecl))
10701 /* If last argument is __builtin_va_arg_pack (), arguments to this
10702 function are not finalized yet. Defer folding until they are. */
10703 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10705 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10706 if (fndecl2
10707 && TREE_CODE (fndecl2) == FUNCTION_DECL
10708 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10709 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10710 return build_call_array_loc (loc, type, fn, n, argarray);
10712 if (avoid_folding_inline_builtin (fndecl))
10713 return build_call_array_loc (loc, type, fn, n, argarray);
10714 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10716 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10717 if (ret)
10718 return ret;
10720 return build_call_array_loc (loc, type, fn, n, argarray);
10722 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10724 /* First try the transformations that don't require consing up
10725 an exp. */
10726 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10727 if (ret)
10728 return ret;
10731 /* If we got this far, we need to build an exp. */
10732 exp = build_call_array_loc (loc, type, fn, n, argarray);
10733 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10734 return ret ? ret : exp;
10738 return build_call_array_loc (loc, type, fn, n, argarray);
10741 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10742 along with N new arguments specified as the "..." parameters. SKIP
10743 is the number of arguments in EXP to be omitted. This function is used
10744 to do varargs-to-varargs transformations. */
10746 static tree
10747 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10749 va_list ap;
10750 tree t;
10752 va_start (ap, n);
10753 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10754 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10755 va_end (ap);
10757 return t;
10760 /* Validate a single argument ARG against a tree code CODE representing
10761 a type. */
10763 static bool
10764 validate_arg (const_tree arg, enum tree_code code)
10766 if (!arg)
10767 return false;
10768 else if (code == POINTER_TYPE)
10769 return POINTER_TYPE_P (TREE_TYPE (arg));
10770 else if (code == INTEGER_TYPE)
10771 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10772 return code == TREE_CODE (TREE_TYPE (arg));
10775 /* This function validates the types of a function call argument list
10776 against a specified list of tree_codes. If the last specifier is a 0,
10777 that represents an ellipsis; otherwise the last specifier must be a
10778 VOID_TYPE.
10780 This is the GIMPLE version of validate_arglist. Eventually we want to
10781 completely convert builtins.c to work from GIMPLE and the tree-based
10782 validate_arglist will then be removed. */
10784 bool
10785 validate_gimple_arglist (const gcall *call, ...)
10787 enum tree_code code;
10788 bool res = false;
10789 va_list ap;
10790 const_tree arg;
10791 size_t i;
10793 va_start (ap, call);
10794 i = 0;
10798 code = (enum tree_code) va_arg (ap, int);
10799 switch (code)
10801 case 0:
10802 /* This signifies an ellipsis; any further arguments are all ok. */
10803 res = true;
10804 goto end;
10805 case VOID_TYPE:
10806 /* This signifies an endlink; if no arguments remain, return
10807 true, otherwise return false. */
10808 res = (i == gimple_call_num_args (call));
10809 goto end;
10810 default:
10811 /* If no parameters remain or the parameter's code does not
10812 match the specified code, return false. Otherwise continue
10813 checking any remaining arguments. */
10814 arg = gimple_call_arg (call, i++);
10815 if (!validate_arg (arg, code))
10816 goto end;
10817 break;
10820 while (1);
10822 /* We need gotos here since we can only have one VA_CLOSE in a
10823 function. */
10824 end: ;
10825 va_end (ap);
10827 return res;
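/* Editor's note (not in the original source): a typical use is
   validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE),
   which accepts exactly one floating-point argument followed by one
   pointer; ending the list with 0 instead of VOID_TYPE would permit
   arbitrary further arguments.  */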
10830 /* Default target-specific builtin expander that does nothing. */
10832 rtx
10833 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10834 rtx target ATTRIBUTE_UNUSED,
10835 rtx subtarget ATTRIBUTE_UNUSED,
10836 machine_mode mode ATTRIBUTE_UNUSED,
10837 int ignore ATTRIBUTE_UNUSED)
10839 return NULL_RTX;
10842 /* Returns true if EXP represents data that would potentially reside
10843 in a readonly section. */
10845 bool
10846 readonly_data_expr (tree exp)
10848 STRIP_NOPS (exp);
10850 if (TREE_CODE (exp) != ADDR_EXPR)
10851 return false;
10853 exp = get_base_address (TREE_OPERAND (exp, 0));
10854 if (!exp)
10855 return false;
10857 /* Make sure we call decl_readonly_section only for trees it
10858 can handle (since it returns true for everything it doesn't
10859 understand). */
10860 if (TREE_CODE (exp) == STRING_CST
10861 || TREE_CODE (exp) == CONSTRUCTOR
10862 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10863 return decl_readonly_section (exp, 0);
10864 else
10865 return false;
10868 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10869 to the call, and TYPE is its return type.
10871 Return NULL_TREE if no simplification was possible, otherwise return the
10872 simplified form of the call as a tree.
10874 The simplified form may be a constant or other expression which
10875 computes the same value, but in a more efficient manner (including
10876 calls to other builtin functions).
10878 The call may contain arguments which need to be evaluated, but
10879 which are not useful to determine the result of the call. In
10880 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10881 COMPOUND_EXPR will be an argument which must be evaluated.
10882 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10883 COMPOUND_EXPR in the chain will contain the tree for the simplified
10884 form of the builtin function call. */
10886 static tree
10887 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10889 if (!validate_arg (s1, POINTER_TYPE)
10890 || !validate_arg (s2, POINTER_TYPE))
10891 return NULL_TREE;
10892 else
10894 tree fn;
10895 const char *p1, *p2;
10897 p2 = c_getstr (s2);
10898 if (p2 == NULL)
10899 return NULL_TREE;
10901 p1 = c_getstr (s1);
10902 if (p1 != NULL)
10904 const char *r = strstr (p1, p2);
10905 tree tem;
10907 if (r == NULL)
10908 return build_int_cst (TREE_TYPE (s1), 0);
10910 /* Return an offset into the constant string argument. */
10911 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10912 return fold_convert_loc (loc, type, tem);
10915 /* The argument is const char *, and the result is char *, so we need
10916 a type conversion here to avoid a warning. */
10917 if (p2[0] == '\0')
10918 return fold_convert_loc (loc, type, s1);
10920 if (p2[1] != '\0')
10921 return NULL_TREE;
10923 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10924 if (!fn)
10925 return NULL_TREE;
10927 /* New argument list transforming strstr(s1, s2) to
10928 strchr(s1, s2[0]). */
10929 return build_call_expr_loc (loc, fn, 2, s1,
10930 build_int_cst (integer_type_node, p2[0]));
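/* A sketch of the resulting folds (hypothetical arguments):

     strstr ("hello", "ll") -> "hello" + 2      (both constant)
     strstr ("abc", "xy")   -> (char *) 0       (not found)
     strstr (s, "")         -> (char *) s
     strstr (s, "l")        -> strchr (s, 'l')

   anything else is left as a real call to strstr.  */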
10934 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10935 the call, and TYPE is its return type.
10937 Return NULL_TREE if no simplification was possible, otherwise return the
10938 simplified form of the call as a tree.
10940 The simplified form may be a constant or other expression which
10941 computes the same value, but in a more efficient manner (including
10942 calls to other builtin functions).
10944 The call may contain arguments which need to be evaluated, but
10945 which are not useful to determine the result of the call. In
10946 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10947 COMPOUND_EXPR will be an argument which must be evaluated.
10948 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10949 COMPOUND_EXPR in the chain will contain the tree for the simplified
10950 form of the builtin function call. */
10952 static tree
10953 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10955 if (!validate_arg (s1, POINTER_TYPE)
10956 || !validate_arg (s2, INTEGER_TYPE))
10957 return NULL_TREE;
10958 else
10960 const char *p1;
10962 if (TREE_CODE (s2) != INTEGER_CST)
10963 return NULL_TREE;
10965 p1 = c_getstr (s1);
10966 if (p1 != NULL)
10968 char c;
10969 const char *r;
10970 tree tem;
10972 if (target_char_cast (s2, &c))
10973 return NULL_TREE;
10975 r = strchr (p1, c);
10977 if (r == NULL)
10978 return build_int_cst (TREE_TYPE (s1), 0);
10980 /* Return an offset into the constant string argument. */
10981 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10982 return fold_convert_loc (loc, type, tem);
10984 return NULL_TREE;
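/* A sketch of the resulting folds (hypothetical arguments):

     strchr ("hello", 'l') -> "hello" + 2    (both constant)
     strchr ("hello", 'z') -> (char *) 0     (not found)

   a non-constant string argument is left as a real call.  */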
10988 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10989 the call, and TYPE is its return type.
10991 Return NULL_TREE if no simplification was possible, otherwise return the
10992 simplified form of the call as a tree.
10994 The simplified form may be a constant or other expression which
10995 computes the same value, but in a more efficient manner (including
10996 calls to other builtin functions).
10998 The call may contain arguments which need to be evaluated, but
10999 which are not useful to determine the result of the call. In
11000 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11001 COMPOUND_EXPR will be an argument which must be evaluated.
11002 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11003 COMPOUND_EXPR in the chain will contain the tree for the simplified
11004 form of the builtin function call. */
11006 static tree
11007 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11009 if (!validate_arg (s1, POINTER_TYPE)
11010 || !validate_arg (s2, INTEGER_TYPE))
11011 return NULL_TREE;
11012 else
11014 tree fn;
11015 const char *p1;
11017 if (TREE_CODE (s2) != INTEGER_CST)
11018 return NULL_TREE;
11020 p1 = c_getstr (s1);
11021 if (p1 != NULL)
11023 char c;
11024 const char *r;
11025 tree tem;
11027 if (target_char_cast (s2, &c))
11028 return NULL_TREE;
11030 r = strrchr (p1, c);
11032 if (r == NULL)
11033 return build_int_cst (TREE_TYPE (s1), 0);
11035 /* Return an offset into the constant string argument. */
11036 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11037 return fold_convert_loc (loc, type, tem);
11040 if (! integer_zerop (s2))
11041 return NULL_TREE;
11043 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11044 if (!fn)
11045 return NULL_TREE;
11047 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11048 return build_call_expr_loc (loc, fn, 2, s1, s2);
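/* A sketch of the resulting folds (hypothetical arguments):

     strrchr ("hello", 'l') -> "hello" + 3      (both constant)
     strrchr (s, '\0')      -> strchr (s, '\0')

   other non-constant cases are left as real calls.  */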
11052 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11053 to the call, and TYPE is its return type.
11055 Return NULL_TREE if no simplification was possible, otherwise return the
11056 simplified form of the call as a tree.
11058 The simplified form may be a constant or other expression which
11059 computes the same value, but in a more efficient manner (including
11060 calls to other builtin functions).
11062 The call may contain arguments which need to be evaluated, but
11063 which are not useful to determine the result of the call. In
11064 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11065 COMPOUND_EXPR will be an argument which must be evaluated.
11066 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11067 COMPOUND_EXPR in the chain will contain the tree for the simplified
11068 form of the builtin function call. */
11070 static tree
11071 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11073 if (!validate_arg (s1, POINTER_TYPE)
11074 || !validate_arg (s2, POINTER_TYPE))
11075 return NULL_TREE;
11076 else
11078 tree fn;
11079 const char *p1, *p2;
11081 p2 = c_getstr (s2);
11082 if (p2 == NULL)
11083 return NULL_TREE;
11085 p1 = c_getstr (s1);
11086 if (p1 != NULL)
11088 const char *r = strpbrk (p1, p2);
11089 tree tem;
11091 if (r == NULL)
11092 return build_int_cst (TREE_TYPE (s1), 0);
11094 /* Return an offset into the constant string argument. */
11095 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11096 return fold_convert_loc (loc, type, tem);
11099 if (p2[0] == '\0')
11100 /* strpbrk(x, "") == NULL.
11101 Evaluate and ignore s1 in case it had side-effects. */
11102 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11104 if (p2[1] != '\0')
11105 return NULL_TREE; /* Really call strpbrk. */
11107 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11108 if (!fn)
11109 return NULL_TREE;
11111 /* New argument list transforming strpbrk(s1, s2) to
11112 strchr(s1, s2[0]). */
11113 return build_call_expr_loc (loc, fn, 2, s1,
11114 build_int_cst (integer_type_node, p2[0]));
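/* A sketch of the resulting folds (hypothetical arguments):

     strpbrk ("hello", "lo") -> "hello" + 2     (both constant)
     strpbrk (s, "")         -> (char *) 0      (s still evaluated)
     strpbrk (s, "l")        -> strchr (s, 'l')  */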
11118 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11119 arguments to the call.
11121 Return NULL_TREE if no simplification was possible, otherwise return the
11122 simplified form of the call as a tree.
11124 The simplified form may be a constant or other expression which
11125 computes the same value, but in a more efficient manner (including
11126 calls to other builtin functions).
11128 The call may contain arguments which need to be evaluated, but
11129 which are not useful to determine the result of the call. In
11130 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11131 COMPOUND_EXPR will be an argument which must be evaluated.
11132 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11133 COMPOUND_EXPR in the chain will contain the tree for the simplified
11134 form of the builtin function call. */
11136 static tree
11137 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11139 if (!validate_arg (dst, POINTER_TYPE)
11140 || !validate_arg (src, POINTER_TYPE)
11141 || !validate_arg (len, INTEGER_TYPE))
11142 return NULL_TREE;
11143 else
11145 const char *p = c_getstr (src);
11147 /* If the requested length is zero, or the src parameter string
11148 length is zero, return the dst parameter. */
11149 if (integer_zerop (len) || (p && *p == '\0'))
11150 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11152 /* If the requested len is greater than or equal to the string
11153 length, call strcat. */
11154 if (TREE_CODE (len) == INTEGER_CST && p
11155 && compare_tree_int (len, strlen (p)) >= 0)
11157 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11159 /* If the replacement _DECL isn't initialized, don't do the
11160 transformation. */
11161 if (!fn)
11162 return NULL_TREE;
11164 return build_call_expr_loc (loc, fn, 2, dst, src);
11166 return NULL_TREE;
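/* A sketch of the resulting folds (hypothetical arguments):

     strncat (d, s, 0)    -> d                   (s and 0 still evaluated)
     strncat (d, "", n)   -> d
     strncat (d, "ab", 5) -> strcat (d, "ab")    (5 >= strlen ("ab"))  */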
11170 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11171 to the call.
11173 Return NULL_TREE if no simplification was possible, otherwise return the
11174 simplified form of the call as a tree.
11176 The simplified form may be a constant or other expression which
11177 computes the same value, but in a more efficient manner (including
11178 calls to other builtin functions).
11180 The call may contain arguments which need to be evaluated, but
11181 which are not useful to determine the result of the call. In
11182 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11183 COMPOUND_EXPR will be an argument which must be evaluated.
11184 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11185 COMPOUND_EXPR in the chain will contain the tree for the simplified
11186 form of the builtin function call. */
11188 static tree
11189 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11191 if (!validate_arg (s1, POINTER_TYPE)
11192 || !validate_arg (s2, POINTER_TYPE))
11193 return NULL_TREE;
11194 else
11196 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11198 /* If both arguments are constants, evaluate at compile-time. */
11199 if (p1 && p2)
11201 const size_t r = strspn (p1, p2);
11202 return build_int_cst (size_type_node, r);
11205 /* If either argument is "", the result is zero. */
11206 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11207 /* Evaluate and ignore both arguments in case either one has
11208 side-effects. */
11209 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11210 s1, s2);
11211 return NULL_TREE;
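/* A sketch of the resulting folds (hypothetical arguments):

     strspn ("aab", "a") -> 2    (both constant, computed at compile time)
     strspn (s, "")      -> 0    (both arguments still evaluated)
     strspn ("", s)      -> 0  */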
11215 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11216 to the call.
11218 Return NULL_TREE if no simplification was possible, otherwise return the
11219 simplified form of the call as a tree.
11221 The simplified form may be a constant or other expression which
11222 computes the same value, but in a more efficient manner (including
11223 calls to other builtin functions).
11225 The call may contain arguments which need to be evaluated, but
11226 which are not useful to determine the result of the call. In
11227 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11228 COMPOUND_EXPR will be an argument which must be evaluated.
11229 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11230 COMPOUND_EXPR in the chain will contain the tree for the simplified
11231 form of the builtin function call. */
11233 static tree
11234 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11236 if (!validate_arg (s1, POINTER_TYPE)
11237 || !validate_arg (s2, POINTER_TYPE))
11238 return NULL_TREE;
11239 else
11241 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11243 /* If both arguments are constants, evaluate at compile-time. */
11244 if (p1 && p2)
11246 const size_t r = strcspn (p1, p2);
11247 return build_int_cst (size_type_node, r);
11250 /* If the first argument is "", the result is zero. */
11251 if (p1 && *p1 == '\0')
11253 /* Evaluate and ignore argument s2 in case it has
11254 side-effects. */
11255 return omit_one_operand_loc (loc, size_type_node,
11256 size_zero_node, s2);
11259 /* If the second argument is "", return __builtin_strlen(s1). */
11260 if (p2 && *p2 == '\0')
11262 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11264 /* If the replacement _DECL isn't initialized, don't do the
11265 transformation. */
11266 if (!fn)
11267 return NULL_TREE;
11269 return build_call_expr_loc (loc, fn, 1, s1);
11271 return NULL_TREE;
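/* A sketch of the resulting folds (hypothetical arguments):

     strcspn ("abc", "c") -> 2    (both constant, computed at compile time)
     strcspn ("", s)      -> 0    (s still evaluated)
     strcspn (s, "")      -> strlen (s)  */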
11275 /* Fold the next_arg or va_start call EXP. Returns true if an error
11276 was produced, false otherwise. This is done so that we don't output
11277 the error or warning two or three times. */
11279 bool
11280 fold_builtin_next_arg (tree exp, bool va_start_p)
11282 tree fntype = TREE_TYPE (current_function_decl);
11283 int nargs = call_expr_nargs (exp);
11284 tree arg;
11285 /* There is a good chance the current input_location points inside the
11286 definition of the va_start macro (perhaps on the token for
11287 builtin) in a system header, so warnings will not be emitted.
11288 Use the location in real source code. */
11289 source_location current_location =
11290 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11291 NULL);
11293 if (!stdarg_p (fntype))
11295 error ("%<va_start%> used in function with fixed args");
11296 return true;
11299 if (va_start_p)
11301 if (va_start_p && (nargs != 2))
11303 error ("wrong number of arguments to function %<va_start%>");
11304 return true;
11306 arg = CALL_EXPR_ARG (exp, 1);
11308 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
11309 when we checked the arguments and if needed issued a warning. */
11310 else
11312 if (nargs == 0)
11314 /* Evidently an out of date version of <stdarg.h>; can't validate
11315 va_start's second argument, but can still work as intended. */
11316 warning_at (current_location,
11317 OPT_Wvarargs,
11318 "%<__builtin_next_arg%> called without an argument");
11319 return true;
11321 else if (nargs > 1)
11323 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11324 return true;
11326 arg = CALL_EXPR_ARG (exp, 0);
11329 if (TREE_CODE (arg) == SSA_NAME)
11330 arg = SSA_NAME_VAR (arg);
11332 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11333 or __builtin_next_arg (0) the first time we see it, after checking
11334 the arguments and if needed issuing a warning. */
11335 if (!integer_zerop (arg))
11337 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11339 /* Strip off all nops for the sake of the comparison. This
11340 is not quite the same as STRIP_NOPS. It does more.
11341 We must also strip off INDIRECT_EXPR for C++ reference
11342 parameters. */
11343 while (CONVERT_EXPR_P (arg)
11344 || TREE_CODE (arg) == INDIRECT_REF)
11345 arg = TREE_OPERAND (arg, 0);
11346 if (arg != last_parm)
11348 /* FIXME: Sometimes the tree optimizers leave us with something
11349 other than the last argument, even though the user used the
11350 last argument. We just warn and treat the arg as the last
11351 argument, so we may generate wrong code because of
11352 it. */
11353 warning_at (current_location,
11354 OPT_Wvarargs,
11355 "second parameter of %<va_start%> not last named argument");
11358 /* Undefined by C99 7.15.1.4p4 (va_start):
11359 "If the parameter parmN is declared with the register storage
11360 class, with a function or array type, or with a type that is
11361 not compatible with the type that results after application of
11362 the default argument promotions, the behavior is undefined."
11364 else if (DECL_REGISTER (arg))
11366 warning_at (current_location,
11367 OPT_Wvarargs,
11368 "undefined behaviour when second parameter of "
11369 "%<va_start%> is declared with %<register%> storage");
11372 /* We want to verify the second parameter just once before the tree
11373 optimizers are run and then avoid keeping it in the tree,
11374 as otherwise we could warn even for correct code like:
11375 void foo (int i, ...)
11376 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11377 if (va_start_p)
11378 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11379 else
11380 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11382 return false;
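/* For example (a sketch of code this function diagnoses):

     void bad (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);   // warns: second parameter of va_start not
                           // last named argument
     }

   while va_start (ap, b) is accepted, and its second argument is then
   replaced with 0 as described above.  */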
11386 /* Expand a call EXP to __builtin_object_size. */
11388 static rtx
11389 expand_builtin_object_size (tree exp)
11391 tree ost;
11392 int object_size_type;
11393 tree fndecl = get_callee_fndecl (exp);
11395 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11397 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11398 exp, fndecl);
11399 expand_builtin_trap ();
11400 return const0_rtx;
11403 ost = CALL_EXPR_ARG (exp, 1);
11404 STRIP_NOPS (ost);
11406 if (TREE_CODE (ost) != INTEGER_CST
11407 || tree_int_cst_sgn (ost) < 0
11408 || compare_tree_int (ost, 3) > 0)
11410 error ("%Klast argument of %D is not integer constant between 0 and 3",
11411 exp, fndecl);
11412 expand_builtin_trap ();
11413 return const0_rtx;
11416 object_size_type = tree_to_shwi (ost);
11418 return object_size_type < 2 ? constm1_rtx : const0_rtx;
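/* At this point the size could not be folded earlier, so the expansion
   is just the "unknown" answer (a sketch, with p an arbitrary pointer):

     __builtin_object_size (p, 0) -> (size_t) -1
     __builtin_object_size (p, 2) -> (size_t) 0  */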
11421 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11422 FCODE is the BUILT_IN_* to use.
11423 Return NULL_RTX if we failed; the caller should emit a normal call,
11424 otherwise try to get the result in TARGET, if convenient (and in
11425 mode MODE if that's convenient). */
11427 static rtx
11428 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11429 enum built_in_function fcode)
11431 tree dest, src, len, size;
11433 if (!validate_arglist (exp,
11434 POINTER_TYPE,
11435 fcode == BUILT_IN_MEMSET_CHK
11436 ? INTEGER_TYPE : POINTER_TYPE,
11437 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11438 return NULL_RTX;
11440 dest = CALL_EXPR_ARG (exp, 0);
11441 src = CALL_EXPR_ARG (exp, 1);
11442 len = CALL_EXPR_ARG (exp, 2);
11443 size = CALL_EXPR_ARG (exp, 3);
11445 if (! tree_fits_uhwi_p (size))
11446 return NULL_RTX;
11448 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11450 tree fn;
11452 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11454 warning_at (tree_nonartificial_location (exp),
11455 0, "%Kcall to %D will always overflow destination buffer",
11456 exp, get_callee_fndecl (exp));
11457 return NULL_RTX;
11460 fn = NULL_TREE;
11461 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11462 mem{cpy,pcpy,move,set} is available. */
11463 switch (fcode)
11465 case BUILT_IN_MEMCPY_CHK:
11466 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11467 break;
11468 case BUILT_IN_MEMPCPY_CHK:
11469 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11470 break;
11471 case BUILT_IN_MEMMOVE_CHK:
11472 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11473 break;
11474 case BUILT_IN_MEMSET_CHK:
11475 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11476 break;
11477 default:
11478 break;
11481 if (! fn)
11482 return NULL_RTX;
11484 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11485 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11486 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11487 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11489 else if (fcode == BUILT_IN_MEMSET_CHK)
11490 return NULL_RTX;
11491 else
11493 unsigned int dest_align = get_pointer_alignment (dest);
11495 /* If DEST is not a pointer type, call the normal function. */
11496 if (dest_align == 0)
11497 return NULL_RTX;
11499 /* If SRC and DEST are the same (and not volatile), do nothing. */
11500 if (operand_equal_p (src, dest, 0))
11502 tree expr;
11504 if (fcode != BUILT_IN_MEMPCPY_CHK)
11506 /* Evaluate and ignore LEN in case it has side-effects. */
11507 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11508 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11511 expr = fold_build_pointer_plus (dest, len);
11512 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11515 /* __memmove_chk special case. */
11516 if (fcode == BUILT_IN_MEMMOVE_CHK)
11518 unsigned int src_align = get_pointer_alignment (src);
11520 if (src_align == 0)
11521 return NULL_RTX;
11523 /* If src is categorized for a readonly section we can use
11524 normal __memcpy_chk. */
11525 if (readonly_data_expr (src))
11527 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11528 if (!fn)
11529 return NULL_RTX;
11530 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11531 dest, src, len, size);
11532 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11533 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11534 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11537 return NULL_RTX;
11541 /* Emit warning if a buffer overflow is detected at compile time. */
11543 static void
11544 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11546 int is_strlen = 0;
11547 tree len, size;
11548 location_t loc = tree_nonartificial_location (exp);
11550 switch (fcode)
11552 case BUILT_IN_STRCPY_CHK:
11553 case BUILT_IN_STPCPY_CHK:
11554 /* For __strcat_chk the warning will be emitted only if overflowing
11555 by at least strlen (dest) + 1 bytes. */
11556 case BUILT_IN_STRCAT_CHK:
11557 len = CALL_EXPR_ARG (exp, 1);
11558 size = CALL_EXPR_ARG (exp, 2);
11559 is_strlen = 1;
11560 break;
11561 case BUILT_IN_STRNCAT_CHK:
11562 case BUILT_IN_STRNCPY_CHK:
11563 case BUILT_IN_STPNCPY_CHK:
11564 len = CALL_EXPR_ARG (exp, 2);
11565 size = CALL_EXPR_ARG (exp, 3);
11566 break;
11567 case BUILT_IN_SNPRINTF_CHK:
11568 case BUILT_IN_VSNPRINTF_CHK:
11569 len = CALL_EXPR_ARG (exp, 1);
11570 size = CALL_EXPR_ARG (exp, 3);
11571 break;
11572 default:
11573 gcc_unreachable ();
11576 if (!len || !size)
11577 return;
11579 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11580 return;
11582 if (is_strlen)
11584 len = c_strlen (len, 1);
11585 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11586 return;
11588 else if (fcode == BUILT_IN_STRNCAT_CHK)
11590 tree src = CALL_EXPR_ARG (exp, 1);
11591 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11592 return;
11593 src = c_strlen (src, 1);
11594 if (! src || ! tree_fits_uhwi_p (src))
11596 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11597 exp, get_callee_fndecl (exp));
11598 return;
11600 else if (tree_int_cst_lt (src, size))
11601 return;
11603 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11604 return;
11606 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11607 exp, get_callee_fndecl (exp));
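/* For example (a sketch; the size argument is roughly what fortified
   wrappers would pass):

     char buf[4];
     __builtin___strcpy_chk (buf, "too long", 4);
       // warns: call to __builtin___strcpy_chk will always overflow
       // destination buffer  */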
11610 /* Emit warning if a buffer overflow is detected at compile time
11611 in __sprintf_chk/__vsprintf_chk calls. */
11613 static void
11614 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11616 tree size, len, fmt;
11617 const char *fmt_str;
11618 int nargs = call_expr_nargs (exp);
11620 /* Verify the required arguments in the original call. */
11622 if (nargs < 4)
11623 return;
11624 size = CALL_EXPR_ARG (exp, 2);
11625 fmt = CALL_EXPR_ARG (exp, 3);
11627 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11628 return;
11630 /* Check whether the format is a literal string constant. */
11631 fmt_str = c_getstr (fmt);
11632 if (fmt_str == NULL)
11633 return;
11635 if (!init_target_chars ())
11636 return;
11638 /* If the format doesn't contain % args or %%, we know its size. */
11639 if (strchr (fmt_str, target_percent) == 0)
11640 len = build_int_cstu (size_type_node, strlen (fmt_str));
11641 /* If the format is "%s" and the first variadic argument is a string
11642 literal, we know it too. */
11643 else if (fcode == BUILT_IN_SPRINTF_CHK
11644 && strcmp (fmt_str, target_percent_s) == 0)
11646 tree arg;
11648 if (nargs < 5)
11649 return;
11650 arg = CALL_EXPR_ARG (exp, 4);
11651 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11652 return;
11654 len = c_strlen (arg, 1);
11655 if (!len || ! tree_fits_uhwi_p (len))
11656 return;
11658 else
11659 return;
11661 if (! tree_int_cst_lt (len, size))
11662 warning_at (tree_nonartificial_location (exp),
11663 0, "%Kcall to %D will always overflow destination buffer",
11664 exp, get_callee_fndecl (exp));
11667 /* Emit warning if a free is called with address of a variable. */
11669 static void
11670 maybe_emit_free_warning (tree exp)
11672 tree arg = CALL_EXPR_ARG (exp, 0);
11674 STRIP_NOPS (arg);
11675 if (TREE_CODE (arg) != ADDR_EXPR)
11676 return;
11678 arg = get_base_address (TREE_OPERAND (arg, 0));
11679 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11680 return;
11682 if (SSA_VAR_P (arg))
11683 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11684 "%Kattempt to free a non-heap object %qD", exp, arg);
11685 else
11686 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11687 "%Kattempt to free a non-heap object", exp);
11690 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11691 if possible. */
11693 static tree
11694 fold_builtin_object_size (tree ptr, tree ost)
11696 unsigned HOST_WIDE_INT bytes;
11697 int object_size_type;
11699 if (!validate_arg (ptr, POINTER_TYPE)
11700 || !validate_arg (ost, INTEGER_TYPE))
11701 return NULL_TREE;
11703 STRIP_NOPS (ost);
11705 if (TREE_CODE (ost) != INTEGER_CST
11706 || tree_int_cst_sgn (ost) < 0
11707 || compare_tree_int (ost, 3) > 0)
11708 return NULL_TREE;
11710 object_size_type = tree_to_shwi (ost);
11712 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11713 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11714 and (size_t) 0 for types 2 and 3. */
11715 if (TREE_SIDE_EFFECTS (ptr))
11716 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11718 if (TREE_CODE (ptr) == ADDR_EXPR)
11720 bytes = compute_builtin_object_size (ptr, object_size_type);
11721 if (wi::fits_to_tree_p (bytes, size_type_node))
11722 return build_int_cstu (size_type_node, bytes);
11724 else if (TREE_CODE (ptr) == SSA_NAME)
11726 /* If object size is not known yet, delay folding until
11727 later. Maybe subsequent passes will help determine
11728 it. */
11729 bytes = compute_builtin_object_size (ptr, object_size_type);
11730 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11731 && wi::fits_to_tree_p (bytes, size_type_node))
11732 return build_int_cstu (size_type_node, bytes);
11735 return NULL_TREE;
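/* A sketch of the constant folds, assuming compute_builtin_object_size
   can see the whole object:

     char buf[16];
     __builtin_object_size (&buf[0], 0) -> 16
     __builtin_object_size (&buf[4], 0) -> 12

   while an SSA_NAME pointer whose object is not yet known is left
   unfolded for later passes.  */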
11738 /* Builtins with folding operations that operate on "..." arguments
11739 need special handling; we need to store the arguments in a convenient
11740 data structure before attempting any folding. Fortunately there are
11741 only a few builtins that fall into this category. FNDECL is the
11742 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11743 result of the function call is ignored. */
11745 static tree
11746 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11747 bool ignore ATTRIBUTE_UNUSED)
11749 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11750 tree ret = NULL_TREE;
11752 switch (fcode)
11754 case BUILT_IN_FPCLASSIFY:
11755 ret = fold_builtin_fpclassify (loc, exp);
11756 break;
11758 default:
11759 break;
11761 if (ret)
11763 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11764 SET_EXPR_LOCATION (ret, loc);
11765 TREE_NO_WARNING (ret) = 1;
11766 return ret;
11768 return NULL_TREE;
11771 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
11772 FMT and ARG are the arguments to the call; we don't fold cases with
11773 more than 2 arguments, and ARG may be null if this is a 1-argument case.
11775 Return NULL_TREE if no simplification was possible, otherwise return the
11776 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11777 code of the function to be simplified. */
11779 static tree
11780 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
11781 tree arg, bool ignore,
11782 enum built_in_function fcode)
11784 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
11785 const char *fmt_str = NULL;
11787 /* If the return value is used, don't do the transformation. */
11788 if (! ignore)
11789 return NULL_TREE;
11791 /* Verify the required arguments in the original call. */
11792 if (!validate_arg (fmt, POINTER_TYPE))
11793 return NULL_TREE;
11795 /* Check whether the format is a literal string constant. */
11796 fmt_str = c_getstr (fmt);
11797 if (fmt_str == NULL)
11798 return NULL_TREE;
11800 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
11802 /* If we're using an unlocked function, assume the other
11803 unlocked functions exist explicitly. */
11804 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
11805 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
11807 else
11809 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
11810 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
11813 if (!init_target_chars ())
11814 return NULL_TREE;
11816 if (strcmp (fmt_str, target_percent_s) == 0
11817 || strchr (fmt_str, target_percent) == NULL)
11819 const char *str;
11821 if (strcmp (fmt_str, target_percent_s) == 0)
11823 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11824 return NULL_TREE;
11826 if (!arg || !validate_arg (arg, POINTER_TYPE))
11827 return NULL_TREE;
11829 str = c_getstr (arg);
11830 if (str == NULL)
11831 return NULL_TREE;
11833 else
11835 /* The format specifier doesn't contain any '%' characters. */
11836 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
11837 && arg)
11838 return NULL_TREE;
11839 str = fmt_str;
11842 /* If the string was "", printf does nothing. */
11843 if (str[0] == '\0')
11844 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11846 /* If the string has length 1, call putchar. */
11847 if (str[1] == '\0')
11849 /* Given printf ("c"), where c is any single character,
11850 convert "c"[0] to an int and pass that to the replacement
11851 function. */
11852 newarg = build_int_cst (integer_type_node, str[0]);
11853 if (fn_putchar)
11854 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
11856 else
11858 /* If the string was "string\n", call puts("string"). */
11859 size_t len = strlen (str);
11860 if ((unsigned char)str[len - 1] == target_newline
11861 && (size_t) (int) len == len
11862 && (int) len > 0)
11864 char *newstr;
11865 tree offset_node, string_cst;
11867 /* Create a NUL-terminated string that's one char shorter
11868 than the original, stripping off the trailing '\n'. */
11869 newarg = build_string_literal (len, str);
11870 string_cst = string_constant (newarg, &offset_node);
11871 gcc_checking_assert (string_cst
11872 && (TREE_STRING_LENGTH (string_cst)
11873 == (int) len)
11874 && integer_zerop (offset_node)
11875 && (unsigned char)
11876 TREE_STRING_POINTER (string_cst)[len - 1]
11877 == target_newline);
11878 /* build_string_literal creates a new STRING_CST,
11879 modify it in place to avoid double copying. */
11880 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
11881 newstr[len - 1] = '\0';
11882 if (fn_puts)
11883 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
11885 else
11886 /* We'd like to arrange to call fputs(string,stdout) here,
11887 but we need stdout and don't have a way to get it yet. */
11888 return NULL_TREE;
11892 /* The other optimizations can be done only on the non-va_list variants. */
11893 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11894 return NULL_TREE;
11896 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
11897 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
11899 if (!arg || !validate_arg (arg, POINTER_TYPE))
11900 return NULL_TREE;
11901 if (fn_puts)
11902 call = build_call_expr_loc (loc, fn_puts, 1, arg);
11905 /* If the format specifier was "%c", call __builtin_putchar(arg). */
11906 else if (strcmp (fmt_str, target_percent_c) == 0)
11908 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11909 return NULL_TREE;
11910 if (fn_putchar)
11911 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
11914 if (!call)
11915 return NULL_TREE;
11917 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
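/* A sketch of the resulting folds (only when the return value is
   ignored):

     printf ("")        -> 0 (the call is removed)
     printf ("x")       -> putchar ('x')
     printf ("hello\n") -> puts ("hello")
     printf ("%s\n", s) -> puts (s)
     printf ("%c", c)   -> putchar (c)  */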
11920 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
11921 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
11922 more than 3 arguments, and ARG may be null in the 2-argument case.
11924 Return NULL_TREE if no simplification was possible, otherwise return the
11925 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11926 code of the function to be simplified. */
11928 static tree
11929 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
11930 tree fmt, tree arg, bool ignore,
11931 enum built_in_function fcode)
11933 tree fn_fputc, fn_fputs, call = NULL_TREE;
11934 const char *fmt_str = NULL;
11936 /* If the return value is used, don't do the transformation. */
11937 if (! ignore)
11938 return NULL_TREE;
11940 /* Verify the required arguments in the original call. */
11941 if (!validate_arg (fp, POINTER_TYPE))
11942 return NULL_TREE;
11943 if (!validate_arg (fmt, POINTER_TYPE))
11944 return NULL_TREE;
11946 /* Check whether the format is a literal string constant. */
11947 fmt_str = c_getstr (fmt);
11948 if (fmt_str == NULL)
11949 return NULL_TREE;
11951 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
11953 /* If we're using an unlocked function, assume the other
11954 unlocked functions exist explicitly. */
11955 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
11956 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
11958 else
11960 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
11961 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
11964 if (!init_target_chars ())
11965 return NULL_TREE;
11967 /* If the format doesn't contain % args or %%, use strcpy. */
11968 if (strchr (fmt_str, target_percent) == NULL)
11970 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
11971 && arg)
11972 return NULL_TREE;
11974 /* If the format specifier was "", fprintf does nothing. */
11975 if (fmt_str[0] == '\0')
11977 /* If FP has side-effects, just wait until gimplification is
11978 done. */
11979 if (TREE_SIDE_EFFECTS (fp))
11980 return NULL_TREE;
11982 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11985 /* When "string" doesn't contain %, replace all cases of
11986 fprintf (fp, string) with fputs (string, fp). The fputs
11987 builtin will take care of special cases like length == 1. */
11988 if (fn_fputs)
11989 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
11992 /* The other optimizations can be done only on the non-va_list variants. */
11993 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
11994 return NULL_TREE;
11996 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
11997 else if (strcmp (fmt_str, target_percent_s) == 0)
11999 if (!arg || !validate_arg (arg, POINTER_TYPE))
12000 return NULL_TREE;
12001 if (fn_fputs)
12002 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12005 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12006 else if (strcmp (fmt_str, target_percent_c) == 0)
12008 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12009 return NULL_TREE;
12010 if (fn_fputc)
12011 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12014 if (!call)
12015 return NULL_TREE;
12016 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
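/* A sketch of the resulting folds (only when the return value is
   ignored):

     fprintf (fp, "")      -> 0 (if FP has no side-effects)
     fprintf (fp, "hello") -> fputs ("hello", fp)
     fprintf (fp, "%s", s) -> fputs (s, fp)
     fprintf (fp, "%c", c) -> fputc (c, fp)  */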
12019 /* Initialize format string characters in the target charset. */
12021 bool
12022 init_target_chars (void)
12024 static bool init;
12025 if (!init)
12027 target_newline = lang_hooks.to_target_charset ('\n');
12028 target_percent = lang_hooks.to_target_charset ('%');
12029 target_c = lang_hooks.to_target_charset ('c');
12030 target_s = lang_hooks.to_target_charset ('s');
12031 if (target_newline == 0 || target_percent == 0 || target_c == 0
12032 || target_s == 0)
12033 return false;
12035 target_percent_c[0] = target_percent;
12036 target_percent_c[1] = target_c;
12037 target_percent_c[2] = '\0';
12039 target_percent_s[0] = target_percent;
12040 target_percent_s[1] = target_s;
12041 target_percent_s[2] = '\0';
12043 target_percent_s_newline[0] = target_percent;
12044 target_percent_s_newline[1] = target_s;
12045 target_percent_s_newline[2] = target_newline;
12046 target_percent_s_newline[3] = '\0';
12048 init = true;
12050 return true;
12053 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12054 and no overflow/underflow occurred. INEXACT is true if M was not
12055 exactly calculated. TYPE is the tree type for the result. This
12056 function assumes that the caller cleared the MPFR flags and then
12057 calculated M, so that any flag now set was raised by that
12058 calculation. Return NULL_TREE if any checks fail. */
12060 static tree
12061 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12063 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12064 overflow/underflow occurred. If -frounding-math, proceed iff the
12065 result of calling FUNC was exact. */
12066 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12067 && (!flag_rounding_math || !inexact))
12069 REAL_VALUE_TYPE rr;
12071 real_from_mpfr (&rr, m, type, GMP_RNDN);
12072 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12073 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12074 but the mpfr_t is not, then we underflowed in the
12075 conversion. */
12076 if (real_isfinite (&rr)
12077 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12079 REAL_VALUE_TYPE rmode;
12081 real_convert (&rmode, TYPE_MODE (type), &rr);
12082 /* Proceed iff the specified mode can hold the value. */
12083 if (real_identical (&rmode, &rr))
12084 return build_real (type, rmode);
12087 return NULL_TREE;
12090 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12091 number and no overflow/underflow occurred. INEXACT is true if M
12092 was not exactly calculated. TYPE is the tree type for the result.
12093 This function assumes that the caller cleared the MPFR flags and
12094 then calculated M, so that any flag now set was raised by that
12095 calculation. Return NULL_TREE if any checks fail; if
12096 FORCE_CONVERT is true, the checks are bypassed. */
12098 static tree
12099 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12101 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12102 overflow/underflow occurred. If -frounding-math, proceed iff the
12103 result of calling FUNC was exact. */
12104 if (force_convert
12105 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12106 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12107 && (!flag_rounding_math || !inexact)))
12109 REAL_VALUE_TYPE re, im;
12111 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12112 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12113 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12114 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12115 but the mpfr_t is not, then we underflowed in the
12116 conversion. */
12117 if (force_convert
12118 || (real_isfinite (&re) && real_isfinite (&im)
12119 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12120 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12122 REAL_VALUE_TYPE re_mode, im_mode;
12124 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12125 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12126 /* Proceed iff the specified mode can hold the value. */
12127 if (force_convert
12128 || (real_identical (&re_mode, &re)
12129 && real_identical (&im_mode, &im)))
12130 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12131 build_real (TREE_TYPE (type), im_mode));
12134 return NULL_TREE;
12137 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12138 FUNC on it and return the resulting value as a tree with type TYPE.
12139 If MIN and/or MAX are not NULL, then the supplied ARG must be
12140 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12141 acceptable values, otherwise they are not. The mpfr precision is
12142 set to the precision of TYPE. We assume that function FUNC returns
12143 zero if the result could be calculated exactly within the requested
12144 precision. */
12146 static tree
12147 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12148 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12149 bool inclusive)
12151 tree result = NULL_TREE;
12153 STRIP_NOPS (arg);
12155 /* To proceed, MPFR must exactly represent the target floating point
12156 format, which only happens when the target base equals two. */
12157 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12158 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12160 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12162 if (real_isfinite (ra)
12163 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12164 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12166 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12167 const int prec = fmt->p;
12168 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12169 int inexact;
12170 mpfr_t m;
12172 mpfr_init2 (m, prec);
12173 mpfr_from_real (m, ra, GMP_RNDN);
12174 mpfr_clear_flags ();
12175 inexact = func (m, m, rnd);
12176 result = do_mpfr_ckconv (m, type, inexact);
12177 mpfr_clear (m);
12181 return result;
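/* For example, the callers elsewhere in this file use it roughly as
   (a sketch):

     do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false);
     do_mpfr_arg1 (arg, type, mpfr_acos, &dconstm1, &dconst1, true);

   so a constant sin argument folds unconditionally, while acos folds
   only for arguments in [-1, 1].  */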
12184 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12185 FUNC on it and return the resulting value as a tree with type TYPE.
12186 The mpfr precision is set to the precision of TYPE. We assume that
12187 function FUNC returns zero if the result could be calculated
12188 exactly within the requested precision. */
12190 static tree
12191 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12192 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12194 tree result = NULL_TREE;
12196 STRIP_NOPS (arg1);
12197 STRIP_NOPS (arg2);
12199 /* To proceed, MPFR must exactly represent the target floating point
12200 format, which only happens when the target base equals two. */
12201 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12202 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12203 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12205 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12206 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12208 if (real_isfinite (ra1) && real_isfinite (ra2))
12210 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12211 const int prec = fmt->p;
12212 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12213 int inexact;
12214 mpfr_t m1, m2;
12216 mpfr_inits2 (prec, m1, m2, NULL);
12217 mpfr_from_real (m1, ra1, GMP_RNDN);
12218 mpfr_from_real (m2, ra2, GMP_RNDN);
12219 mpfr_clear_flags ();
12220 inexact = func (m1, m1, m2, rnd);
12221 result = do_mpfr_ckconv (m1, type, inexact);
12222 mpfr_clears (m1, m2, NULL);
12226 return result;
12229 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12230 FUNC on it and return the resulting value as a tree with type TYPE.
12231 The mpfr precision is set to the precision of TYPE. We assume that
12232 function FUNC returns zero if the result could be calculated
12233 exactly within the requested precision. */
12235 static tree
12236 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12237 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12239 tree result = NULL_TREE;
12241 STRIP_NOPS (arg1);
12242 STRIP_NOPS (arg2);
12243 STRIP_NOPS (arg3);
12245 /* To proceed, MPFR must exactly represent the target floating point
12246 format, which only happens when the target base equals two. */
12247 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12248 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12249 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12250 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12252 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12253 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12254 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12256 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12258 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12259 const int prec = fmt->p;
12260 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12261 int inexact;
12262 mpfr_t m1, m2, m3;
12264 mpfr_inits2 (prec, m1, m2, m3, NULL);
12265 mpfr_from_real (m1, ra1, GMP_RNDN);
12266 mpfr_from_real (m2, ra2, GMP_RNDN);
12267 mpfr_from_real (m3, ra3, GMP_RNDN);
12268 mpfr_clear_flags ();
12269 inexact = func (m1, m1, m2, m3, rnd);
12270 result = do_mpfr_ckconv (m1, type, inexact);
12271 mpfr_clears (m1, m2, m3, NULL);
12275 return result;
12278 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12279 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12280 If ARG_SINP and ARG_COSP are NULL then the result is returned
12281 as a complex value.
12282 The type is taken from the type of ARG and is used for setting the
12283 precision of the calculation and results. */
12285 static tree
12286 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12288 tree const type = TREE_TYPE (arg);
12289 tree result = NULL_TREE;
12291 STRIP_NOPS (arg);
12293 /* To proceed, MPFR must exactly represent the target floating point
12294 format, which only happens when the target base equals two. */
12295 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12296 && TREE_CODE (arg) == REAL_CST
12297 && !TREE_OVERFLOW (arg))
12299 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12301 if (real_isfinite (ra))
12303 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12304 const int prec = fmt->p;
12305 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12306 tree result_s, result_c;
12307 int inexact;
12308 mpfr_t m, ms, mc;
12310 mpfr_inits2 (prec, m, ms, mc, NULL);
12311 mpfr_from_real (m, ra, GMP_RNDN);
12312 mpfr_clear_flags ();
12313 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12314 result_s = do_mpfr_ckconv (ms, type, inexact);
12315 result_c = do_mpfr_ckconv (mc, type, inexact);
12316 mpfr_clears (m, ms, mc, NULL);
12317 if (result_s && result_c)
12319 /* If we are to return the result as a complex value, do so. */
12320 if (!arg_sinp && !arg_cosp)
12321 return build_complex (build_complex_type (type),
12322 result_c, result_s);
12324 /* Dereference the sin/cos pointer arguments. */
12325 arg_sinp = build_fold_indirect_ref (arg_sinp);
12326 arg_cosp = build_fold_indirect_ref (arg_cosp);
12327 /* Proceed iff valid pointer types were passed in. */
12328 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12329 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12331 /* Set the values. */
12332 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12333 result_s);
12334 TREE_SIDE_EFFECTS (result_s) = 1;
12335 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12336 result_c);
12337 TREE_SIDE_EFFECTS (result_c) = 1;
12338 /* Combine the assignments into a compound expr. */
12339 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12340 result_s, result_c));
12345 return result;
12348 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12349 two-argument mpfr order N Bessel function FUNC on them and return
12350 the resulting value as a tree with type TYPE. The mpfr precision
12351 is set to the precision of TYPE. We assume that function FUNC
12352 returns zero if the result could be calculated exactly within the
12353 requested precision. */
12354 static tree
12355 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12356 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12357 const REAL_VALUE_TYPE *min, bool inclusive)
12359 tree result = NULL_TREE;
12361 STRIP_NOPS (arg1);
12362 STRIP_NOPS (arg2);
12364 /* To proceed, MPFR must exactly represent the target floating point
12365 format, which only happens when the target base equals two. */
12366 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12367 && tree_fits_shwi_p (arg1)
12368 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12370 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12371 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12373 if (n == (long)n
12374 && real_isfinite (ra)
12375 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12377 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12378 const int prec = fmt->p;
12379 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12380 int inexact;
12381 mpfr_t m;
12383 mpfr_init2 (m, prec);
12384 mpfr_from_real (m, ra, GMP_RNDN);
12385 mpfr_clear_flags ();
12386 inexact = func (m, n, m, rnd);
12387 result = do_mpfr_ckconv (m, type, inexact);
12388 mpfr_clear (m);
12392 return result;
12395 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12396 the pointer *(ARG_QUO) and return the result. The type is taken
12397 from the type of ARG0 and is used for setting the precision of the
12398 calculation and results. */
12400 static tree
12401 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12403 tree const type = TREE_TYPE (arg0);
12404 tree result = NULL_TREE;
12406 STRIP_NOPS (arg0);
12407 STRIP_NOPS (arg1);
12409 /* To proceed, MPFR must exactly represent the target floating point
12410 format, which only happens when the target base equals two. */
12411 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12412 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12413 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12415 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12416 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12418 if (real_isfinite (ra0) && real_isfinite (ra1))
12420 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12421 const int prec = fmt->p;
12422 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12423 tree result_rem;
12424 long integer_quo;
12425 mpfr_t m0, m1;
12427 mpfr_inits2 (prec, m0, m1, NULL);
12428 mpfr_from_real (m0, ra0, GMP_RNDN);
12429 mpfr_from_real (m1, ra1, GMP_RNDN);
12430 mpfr_clear_flags ();
12431 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12432 /* Remquo is independent of the rounding mode, so pass
12433 inexact=0 to do_mpfr_ckconv(). */
12434 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12435 mpfr_clears (m0, m1, NULL);
12436 if (result_rem)
12438 /* MPFR calculates quo in the host's long so it may
12439 return more bits in quo than the target int can hold
12440 if sizeof(host long) > sizeof(target int). This can
12441 happen even for native compilers in LP64 mode. In
12442 these cases, reduce the quo value modulo the largest
12443 number that the target int can hold, leaving one
12444 bit for the sign. */
12445 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12446 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12448 /* Dereference the quo pointer argument. */
12449 arg_quo = build_fold_indirect_ref (arg_quo);
12450 /* Proceed iff a valid pointer type was passed in. */
12451 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12453 /* Set the value. */
12454 tree result_quo
12455 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12456 build_int_cst (TREE_TYPE (arg_quo),
12457 integer_quo));
12458 TREE_SIDE_EFFECTS (result_quo) = 1;
12459 /* Combine the quo assignment with the rem. */
12460 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12461 result_quo, result_rem));
12466 return result;
12469 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12470 resulting value as a tree with type TYPE. The mpfr precision is
12471 set to the precision of TYPE. We assume that this mpfr function
12472 returns zero if the result could be calculated exactly within the
12473 requested precision. In addition, the integer pointer represented
12474 by ARG_SG will be dereferenced and set to the appropriate signgam
12475 (-1,1) value. */
12477 static tree
12478 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12480 tree result = NULL_TREE;
12482 STRIP_NOPS (arg);
12484 /* To proceed, MPFR must exactly represent the target floating point
12485 format, which only happens when the target base equals two. Also
12486 verify ARG is a constant and that ARG_SG is an int pointer. */
12487 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12488 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12489 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12490 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12492 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12494 /* In addition to NaN and Inf, the argument cannot be zero or a
12495 negative integer. */
12496 if (real_isfinite (ra)
12497 && ra->cl != rvc_zero
12498 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12500 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12501 const int prec = fmt->p;
12502 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12503 int inexact, sg;
12504 mpfr_t m;
12505 tree result_lg;
12507 mpfr_init2 (m, prec);
12508 mpfr_from_real (m, ra, GMP_RNDN);
12509 mpfr_clear_flags ();
12510 inexact = mpfr_lgamma (m, &sg, m, rnd);
12511 result_lg = do_mpfr_ckconv (m, type, inexact);
12512 mpfr_clear (m);
12513 if (result_lg)
12515 tree result_sg;
12517 /* Dereference the arg_sg pointer argument. */
12518 arg_sg = build_fold_indirect_ref (arg_sg);
12519 /* Assign the signgam value into *arg_sg. */
12520 result_sg = fold_build2 (MODIFY_EXPR,
12521 TREE_TYPE (arg_sg), arg_sg,
12522 build_int_cst (TREE_TYPE (arg_sg), sg));
12523 TREE_SIDE_EFFECTS (result_sg) = 1;
12524 /* Combine the signgam assignment with the lgamma result. */
12525 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12526 result_sg, result_lg));
12531 return result;
12534 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12535 function FUNC on it and return the resulting value as a tree with
12536 type TYPE. The mpfr precision is set to the precision of TYPE. We
12537 assume that function FUNC returns zero if the result could be
12538 calculated exactly within the requested precision. */
12540 static tree
12541 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12543 tree result = NULL_TREE;
12545 STRIP_NOPS (arg);
12547 /* To proceed, MPFR must exactly represent the target floating point
12548 format, which only happens when the target base equals two. */
12549 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12550 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12551 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12553 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12554 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12556 if (real_isfinite (re) && real_isfinite (im))
12558 const struct real_format *const fmt =
12559 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12560 const int prec = fmt->p;
12561 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12562 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12563 int inexact;
12564 mpc_t m;
12566 mpc_init2 (m, prec);
12567 mpfr_from_real (mpc_realref (m), re, rnd);
12568 mpfr_from_real (mpc_imagref (m), im, rnd);
12569 mpfr_clear_flags ();
12570 inexact = func (m, m, crnd);
12571 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
12572 mpc_clear (m);
12576 return result;
12579 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
12580 mpc function FUNC on it and return the resulting value as a tree
12581 with type TYPE. The mpfr precision is set to the precision of
12582 TYPE. We assume that function FUNC returns zero if the result
12583 could be calculated exactly within the requested precision. If
12584 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12585 in the arguments and/or results. */
tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
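
/* Illustrative sketch: the two-argument form works the same way, with
   the extra DO_NONFINITE knob as the fourth argument.  Folding a
   constant __builtin_cpow could, schematically, be written as

     return do_mpc_arg2 (arg0, arg1, type, 0, mpc_pow);

   Passing a nonzero fourth argument lets Inf/NaN operands and results
   through to do_mpc_ckconv instead of punting on them.  */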
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */
tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
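
/* Illustrative sketch of a hypothetical caller: a pass walking a
   basic block BB can try this wrapper on every call statement it
   sees, since NULL_TREE just means "no simplification found":

     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	  !gsi_end_p (gsi); gsi_next (&gsi))
       if (gcall *call = dyn_cast <gcall *> (gsi_stmt (gsi)))
	 {
	   tree folded = fold_call_stmt (call,
					 gimple_call_lhs (call) == NULL_TREE);
	   if (folded)
	     ;  ... replace the call with FOLDED here ...
	 }

   The replacement step is elided; how it is done depends on the
   pass.  */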
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */
void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
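
/* Illustrative sketch: this hook matters when user code renames a
   builtin at the assembler level, e.g.

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("__my_memcpy");

   After such a declaration, block moves that GCC itself emits must
   call __my_memcpy rather than memcpy, which is what the
   BUILT_IN_MEMCPY case above arranges.  The name __my_memcpy is of
   course arbitrary.  */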
/* Return true if DECL is a builtin that expands to a constant or
   similarly simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
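
/* Illustrative sketch of a hypothetical query, e.g. from a size or
   cost heuristic that wants to treat such calls as nearly free:

     if (is_simple_builtin (gimple_call_fndecl (stmt)))
       cost = 1;

   gimple_call_fndecl returns NULL_TREE for indirect calls, which the
   predicate handles by returning false.  COST is a hypothetical
   local, not a name defined here.  */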
/* Return true if DECL is a builtin that is not expensive, i.e., one
   that is most probably expanded inline into reasonably simple code.
   This is a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
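
/* Illustrative sketch: because the default case above falls back to
   is_simple_builtin, callers only need the one query.  A loop
   transformation estimating the size of an unrolled body might,
   schematically, do

     if (is_inexpensive_builtin (gimple_call_fndecl (stmt)))
       likely_eliminated = true;

   where LIKELY_ELIMINATED is a hypothetical local flag, not a name
   defined in this file.  */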