1 /* Expand builtin functions.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "calls.h"
30 #include "varasm.h"
31 #include "tree-object-size.h"
32 #include "realmpfr.h"
33 #include "basic-block.h"
34 #include "tree-ssa-alias.h"
35 #include "internal-fn.h"
36 #include "gimple-expr.h"
37 #include "is-a.h"
38 #include "gimple.h"
39 #include "flags.h"
40 #include "regs.h"
41 #include "hard-reg-set.h"
42 #include "except.h"
43 #include "function.h"
44 #include "insn-config.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "recog.h"
49 #include "output.h"
50 #include "typeclass.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "target.h"
54 #include "langhooks.h"
55 #include "tree-ssanames.h"
56 #include "tree-dfa.h"
57 #include "value-prof.h"
58 #include "diagnostic-core.h"
59 #include "builtins.h"
60 #include "ubsan.h"
61 #include "cilk.h"
64 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
66 struct target_builtins default_target_builtins;
67 #if SWITCHABLE_TARGET
68 struct target_builtins *this_target_builtins = &default_target_builtins;
69 #endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees and make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Nonzero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
89 static rtx c_readstr (const char *, enum machine_mode);
90 static int target_char_cast (tree, char *);
91 static rtx get_memory_rtx (tree, tree);
92 static int apply_args_size (void);
93 static int apply_result_size (void);
94 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
95 static rtx result_vector (int, rtx);
96 #endif
97 static void expand_builtin_update_setjmp_buf (rtx);
98 static void expand_builtin_prefetch (tree);
99 static rtx expand_builtin_apply_args (void);
100 static rtx expand_builtin_apply_args_1 (void);
101 static rtx expand_builtin_apply (rtx, rtx, rtx);
102 static void expand_builtin_return (rtx);
103 static enum type_class type_to_class (tree);
104 static rtx expand_builtin_classify_type (tree);
105 static void expand_errno_check (tree, rtx);
106 static rtx expand_builtin_mathfn (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
109 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
110 static rtx expand_builtin_interclass_mathfn (tree, rtx);
111 static rtx expand_builtin_sincos (tree);
112 static rtx expand_builtin_cexpi (tree, rtx);
113 static rtx expand_builtin_int_roundingfn (tree, rtx);
114 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
115 static rtx expand_builtin_next_arg (void);
116 static rtx expand_builtin_va_start (tree);
117 static rtx expand_builtin_va_end (tree);
118 static rtx expand_builtin_va_copy (tree);
119 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strcmp (tree, rtx);
121 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
122 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
123 static rtx expand_builtin_memcpy (tree, rtx);
124 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
126 enum machine_mode, int);
127 static rtx expand_builtin_strcpy (tree, rtx);
128 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
129 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx);
131 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
132 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
133 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
134 static rtx expand_builtin_bzero (tree);
135 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_alloca (tree, bool);
137 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
138 static rtx expand_builtin_frame_address (tree, tree);
139 static tree stabilize_va_list_loc (location_t, tree, int);
140 static rtx expand_builtin_expect (tree, rtx);
141 static tree fold_builtin_constant_p (tree);
142 static tree fold_builtin_classify_type (tree);
143 static tree fold_builtin_strlen (location_t, tree, tree);
144 static tree fold_builtin_inf (location_t, tree, int);
145 static tree fold_builtin_nan (tree, tree, int);
146 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
147 static bool validate_arg (const_tree, enum tree_code code);
148 static bool integer_valued_real_p (tree);
149 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
150 static rtx expand_builtin_fabs (tree, rtx, rtx);
151 static rtx expand_builtin_signbit (tree, rtx);
152 static tree fold_builtin_sqrt (location_t, tree, tree);
153 static tree fold_builtin_cbrt (location_t, tree, tree);
154 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
155 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
156 static tree fold_builtin_cos (location_t, tree, tree, tree);
157 static tree fold_builtin_cosh (location_t, tree, tree, tree);
158 static tree fold_builtin_tan (tree, tree);
159 static tree fold_builtin_trunc (location_t, tree, tree);
160 static tree fold_builtin_floor (location_t, tree, tree);
161 static tree fold_builtin_ceil (location_t, tree, tree);
162 static tree fold_builtin_round (location_t, tree, tree);
163 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
164 static tree fold_builtin_bitop (tree, tree);
165 static tree fold_builtin_strchr (location_t, tree, tree, tree);
166 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
167 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
168 static tree fold_builtin_strcmp (location_t, tree, tree);
169 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
170 static tree fold_builtin_signbit (location_t, tree, tree);
171 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
172 static tree fold_builtin_isascii (location_t, tree);
173 static tree fold_builtin_toascii (location_t, tree);
174 static tree fold_builtin_isdigit (location_t, tree);
175 static tree fold_builtin_fabs (location_t, tree, tree);
176 static tree fold_builtin_abs (location_t, tree, tree);
177 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
178 enum tree_code);
179 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
180 static tree fold_builtin_0 (location_t, tree, bool);
181 static tree fold_builtin_1 (location_t, tree, tree, bool);
182 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
183 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
184 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
185 static tree fold_builtin_varargs (location_t, tree, tree, bool);
187 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
188 static tree fold_builtin_strstr (location_t, tree, tree, tree);
189 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
190 static tree fold_builtin_strncat (location_t, tree, tree, tree);
191 static tree fold_builtin_strspn (location_t, tree, tree);
192 static tree fold_builtin_strcspn (location_t, tree, tree);
194 static rtx expand_builtin_object_size (tree);
195 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
196 enum built_in_function);
197 static void maybe_emit_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
199 static void maybe_emit_free_warning (tree);
200 static tree fold_builtin_object_size (tree, tree);
201 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
202 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
207 static unsigned HOST_WIDE_INT target_newline;
208 unsigned HOST_WIDE_INT target_percent;
209 static unsigned HOST_WIDE_INT target_c;
210 static unsigned HOST_WIDE_INT target_s;
211 static char target_percent_c[3];
212 char target_percent_s[3];
213 static char target_percent_s_newline[4];
214 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
215 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
216 static tree do_mpfr_arg2 (tree, tree, tree,
217 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
218 static tree do_mpfr_arg3 (tree, tree, tree, tree,
219 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
220 static tree do_mpfr_sincos (tree, tree, tree);
221 static tree do_mpfr_bessel_n (tree, tree, tree,
222 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
223 const REAL_VALUE_TYPE *, bool);
224 static tree do_mpfr_remquo (tree, tree, tree);
225 static tree do_mpfr_lgamma_r (tree, tree, tree);
226 static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_,
   or, when Cilk Plus is enabled, names one of the Cilk runtime
   entry points.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
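/* For illustration:

     is_builtin_name ("__builtin_memcpy")         => true
     is_builtin_name ("__sync_fetch_and_add_4")   => true
     is_builtin_name ("__atomic_load_8")          => true
     is_builtin_name ("memcpy")                   => false

   The two Cilk runtime names are only accepted when -fcilkplus is in
   effect.  */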
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME, since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
                   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
          align *= BITS_PER_UNIT;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
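/* As a concrete reading of the M/N contract above: for an access whose
   address is known to be 16*k + 4 bytes, this sets *ALIGNP to 128 and
   *BITPOSP to 32 (both in bits), since the address minus 32 bits is
   divisible by 128 bits.  */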
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
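/* For example, if get_object_alignment_1 reports align == 128 and
   bitpos == 32, the object sits at a 16*k + 4 byte address, so the
   provable alignment is only the lowest set bit of bitpos: 32 bits,
   i.e. 4 bytes.  */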
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
          && !TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
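/* A few examples of what c_strlen can and cannot fold, once
   string_constant has reduced SRC to a STRING_CST plus offset:

     "hello"          => 5
     "hello" + 2      => 3  (known constant offset)
     "foo\0bar" + i   => NULL_TREE: with a variable offset and an
                         embedded NUL the answer depends on where the
                         search would start

   A constant offset past the end of the string triggers the
   out-of-bounds warning above and also yields NULL_TREE.  */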
/* Return a char pointer for a C string if it is a string constant
   or a sum of a string constant and an integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
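/* For example, c_getstr on a tree representing "hello" + 2 yields a
   host pointer to "llo"; a non-constant offset, or one beyond the
   string's storage, yields 0.  */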
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
        ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
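/* For example, on a little-endian target with 8-bit units, reading
   "abcd" in a 32-bit integer mode yields the constant 0x64636261,
   with 'a' (0x61) in the least significant byte; a big-endian target
   yields 0x61626364 for the same call.  */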
/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* We do not care at this point whether the value fits.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
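/* With 8-bit target chars an INTEGER_CST of 65 comes back as 'A' with
   a return value of 0.  The failure path matters when the target char
   is wider than the host char: a value such as 0x141 then survives the
   CHAR_TYPE_SIZE mask but not the HOST_BITS_PER_CHAR mask, so 1 is
   returned.  */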
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
732 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
733 times to get the address of either a higher stack frame, or a return
734 address located within it (depending on FNDECL_CODE). */
736 static rtx
737 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
739 int i;
741 #ifdef INITIAL_FRAME_ADDRESS_RTX
742 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
743 #else
744 rtx tem;
746 /* For a zero count with __builtin_return_address, we don't care what
747 frame address we return, because target-specific definitions will
748 override us. Therefore frame pointer elimination is OK, and using
749 the soft frame pointer is OK.
751 For a nonzero count, or a zero count with __builtin_frame_address,
752 we require a stable offset from the current frame pointer to the
753 previous one, so we must use the hard frame pointer, and
754 we must disable frame pointer elimination. */
755 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
756 tem = frame_pointer_rtx;
757 else
759 tem = hard_frame_pointer_rtx;
761 /* Tell reload not to eliminate the frame pointer. */
762 crtl->accesses_prior_frames = 1;
764 #endif
766 /* Some machines need special handling before we can access
767 arbitrary frames. For example, on the SPARC, we must first flush
768 all register windows to the stack. */
769 #ifdef SETUP_FRAME_ADDRESSES
770 if (count > 0)
771 SETUP_FRAME_ADDRESSES ();
772 #endif
774 /* On the SPARC, the return address is not in the frame, it is in a
775 register. There is no way to access it off of the current frame
776 pointer, but it can be accessed off the previous frame pointer by
777 reading the value from the register window save area. */
778 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
779 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
780 count--;
781 #endif
783 /* Scan back COUNT frames to the specified frame. */
784 for (i = 0; i < count; i++)
786 /* Assume the dynamic chain pointer is in the word that the
787 frame address points to, unless otherwise specified. */
788 #ifdef DYNAMIC_CHAIN_ADDRESS
789 tem = DYNAMIC_CHAIN_ADDRESS (tem);
790 #endif
791 tem = memory_address (Pmode, tem);
792 tem = gen_frame_mem (Pmode, tem);
793 tem = copy_to_reg (tem);
796 /* For __builtin_frame_address, return what we've got. But, on
797 the SPARC for example, we may have to add a bias. */
798 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
799 #ifdef FRAME_ADDR_RTX
800 return FRAME_ADDR_RTX (tem);
801 #else
802 return tem;
803 #endif
805 /* For __builtin_return_address, get the return address from that frame. */
806 #ifdef RETURN_ADDR_RTX
807 tem = RETURN_ADDR_RTX (count, tem);
808 #else
809 tem = memory_address (Pmode,
810 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
811 tem = gen_frame_mem (Pmode, tem);
812 #endif
813 return tem;
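/* At the source level the two builtins handled here look like:

     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (1);

   A count of 0 names the current frame; larger counts walk the dynamic
   chain and are only as reliable as the target's frame linkage, which
   is why they force use of the hard frame pointer above.  */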
816 /* Alias set used for setjmp buffer. */
817 static alias_set_type setjmp_alias_set = -1;
819 /* Construct the leading half of a __builtin_setjmp call. Control will
820 return to RECEIVER_LABEL. This is also called directly by the SJLJ
821 exception handling code. */
823 void
824 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
826 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
827 rtx stack_save;
828 rtx mem;
830 if (setjmp_alias_set == -1)
831 setjmp_alias_set = new_alias_set ();
833 buf_addr = convert_memory_address (Pmode, buf_addr);
835 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
837 /* We store the frame pointer and the address of receiver_label in
838 the buffer and use the rest of it for the stack save area, which
839 is machine-dependent. */
841 mem = gen_rtx_MEM (Pmode, buf_addr);
842 set_mem_alias_set (mem, setjmp_alias_set);
843 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
845 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
846 GET_MODE_SIZE (Pmode))),
847 set_mem_alias_set (mem, setjmp_alias_set);
849 emit_move_insn (validize_mem (mem),
850 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
852 stack_save = gen_rtx_MEM (sa_mode,
853 plus_constant (Pmode, buf_addr,
854 2 * GET_MODE_SIZE (Pmode)));
855 set_mem_alias_set (stack_save, setjmp_alias_set);
856 emit_stack_save (SAVE_NONLOCAL, &stack_save);
858 /* If there is further processing to do, do it. */
859 #ifdef HAVE_builtin_setjmp_setup
860 if (HAVE_builtin_setjmp_setup)
861 emit_insn (gen_builtin_setjmp_setup (buf_addr));
862 #endif
864 /* We have a nonlocal label. */
865 cfun->has_nonlocal_label = 1;
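/* The resulting buffer layout, in Pmode-sized words, is roughly:

     buf[0]  frame value (targetm.builtin_setjmp_frame_value)
     buf[1]  address of RECEIVER_LABEL
     buf[2]  start of the machine-dependent stack save area

   which is exactly what expand_builtin_longjmp below picks apart.  */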
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */
872 void
873 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
875 rtx chain;
877 /* Mark the FP as used when we get here, so we have to make sure it's
878 marked as used by this function. */
879 emit_use (hard_frame_pointer_rtx);
881 /* Mark the static chain as clobbered here so life information
882 doesn't get messed up for it. */
883 chain = targetm.calls.static_chain (current_function_decl, true);
884 if (chain && REG_P (chain))
885 emit_clobber (chain);
887 /* Now put in the code to restore the frame pointer, and argument
888 pointer, if needed. */
889 #ifdef HAVE_nonlocal_goto
890 if (! HAVE_nonlocal_goto)
891 #endif
893 /* First adjust our frame pointer to its actual value. It was
894 previously set to the start of the virtual area corresponding to
895 the stacked variables when we branched here and now needs to be
896 adjusted to the actual hardware fp value.
898 Assignments to virtual registers are converted by
899 instantiate_virtual_regs into the corresponding assignment
900 to the underlying register (fp in this case) that makes
901 the original assignment true.
902 So the following insn will actually be decrementing fp by
903 STARTING_FRAME_OFFSET. */
904 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
906 /* Restoring the frame pointer also modifies the hard frame pointer.
907 Mark it used (so that the previous assignment remains live once
908 the frame pointer is eliminated) and clobbered (to represent the
909 implicit update from the assignment). */
910 emit_use (hard_frame_pointer_rtx);
911 emit_clobber (hard_frame_pointer_rtx);
914 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
915 if (fixed_regs[ARG_POINTER_REGNUM])
917 #ifdef ELIMINABLE_REGS
918 /* If the argument pointer can be eliminated in favor of the
919 frame pointer, we don't need to restore it. We assume here
920 that if such an elimination is present, it can always be used.
921 This is the case on all known machines; if we don't make this
922 assumption, we do unnecessary saving on many machines. */
923 size_t i;
924 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
926 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
927 if (elim_regs[i].from == ARG_POINTER_REGNUM
928 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
929 break;
931 if (i == ARRAY_SIZE (elim_regs))
932 #endif
934 /* Now restore our arg pointer from the address at which it
935 was saved in our stack frame. */
936 emit_move_insn (crtl->args.internal_arg_pointer,
937 copy_to_reg (get_arg_pointer_save_area ()));
940 #endif
942 #ifdef HAVE_builtin_setjmp_receiver
943 if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
944 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
945 else
946 #endif
947 #ifdef HAVE_nonlocal_goto_receiver
948 if (HAVE_nonlocal_goto_receiver)
949 emit_insn (gen_nonlocal_goto_receiver ());
950 else
951 #endif
952 { /* Nothing */ }
954 /* We must not allow the code we just generated to be reordered by
955 scheduling. Specifically, the update of the frame pointer must
956 happen immediately, not later. */
957 emit_insn (gen_blockage ());
960 /* __builtin_longjmp is passed a pointer to an array of five words (not
961 all will be used on all machines). It operates similarly to the C
962 library function of the same name, but is more efficient. Much of
963 the code below is copied from the handling of non-local gotos. */
965 static void
966 expand_builtin_longjmp (rtx buf_addr, rtx value)
968 rtx fp, lab, stack;
969 rtx_insn *insn, *last;
970 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  /* DRAP is needed for stack realignment if longjmp is expanded to the
     current function.  */
974 if (SUPPORTS_STACK_ALIGNMENT)
975 crtl->need_drap = true;
977 if (setjmp_alias_set == -1)
978 setjmp_alias_set = new_alias_set ();
980 buf_addr = convert_memory_address (Pmode, buf_addr);
982 buf_addr = force_reg (Pmode, buf_addr);
  /* We require the user to pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
986 gcc_assert (value == const1_rtx);
988 last = get_last_insn ();
989 #ifdef HAVE_builtin_longjmp
990 if (HAVE_builtin_longjmp)
991 emit_insn (gen_builtin_longjmp (buf_addr));
992 else
993 #endif
995 fp = gen_rtx_MEM (Pmode, buf_addr);
996 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
997 GET_MODE_SIZE (Pmode)));
999 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1000 2 * GET_MODE_SIZE (Pmode)));
1001 set_mem_alias_set (fp, setjmp_alias_set);
1002 set_mem_alias_set (lab, setjmp_alias_set);
1003 set_mem_alias_set (stack, setjmp_alias_set);
1005 /* Pick up FP, label, and SP from the block and jump. This code is
1006 from expand_goto in stmt.c; see there for detailed comments. */
1007 #ifdef HAVE_nonlocal_goto
1008 if (HAVE_nonlocal_goto)
1009 /* We have to pass a value to the nonlocal_goto pattern that will
1010 get copied into the static_chain pointer, but it does not matter
1011 what that value is, because builtin_setjmp does not use it. */
1012 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1013 else
1014 #endif
1016 lab = copy_to_reg (lab);
1018 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1019 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1021 emit_move_insn (hard_frame_pointer_rtx, fp);
1022 emit_stack_restore (SAVE_NONLOCAL, stack);
1024 emit_use (hard_frame_pointer_rtx);
1025 emit_use (stack_pointer_rtx);
1026 emit_indirect_jump (lab);
1030 /* Search backwards and mark the jump insn as a non-local goto.
1031 Note that this precludes the use of __builtin_longjmp to a
1032 __builtin_setjmp target in the same function. However, we've
1033 already cautioned the user that these functions are for
1034 internal exception handling use only. */
1035 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1037 gcc_assert (insn != last);
1039 if (JUMP_P (insn))
1041 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1042 break;
1044 else if (CALL_P (insn))
1045 break;
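/* From the user's point of view the only supported pattern is roughly:

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);

   where the second argument must be the literal 1, as asserted above.
   These builtins exist for the SJLJ exception machinery rather than as
   general replacements for setjmp/longjmp.  */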
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis; otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis; any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink; if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
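/* Typical uses from the expanders in this file: a memcmp-like call is
   checked with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
                       VOID_TYPE)

   while __builtin_prefetch passes a trailing 0 so that its optional
   second and third arguments are accepted:

     validate_arglist (exp, POINTER_TYPE, 0)  */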
1106 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1107 and the address of the save area. */
1109 static rtx
1110 expand_builtin_nonlocal_goto (tree exp)
1112 tree t_label, t_save_area;
1113 rtx r_label, r_save_area, r_fp, r_sp;
1114 rtx_insn *insn;
1116 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1117 return NULL_RTX;
1119 t_label = CALL_EXPR_ARG (exp, 0);
1120 t_save_area = CALL_EXPR_ARG (exp, 1);
1122 r_label = expand_normal (t_label);
1123 r_label = convert_memory_address (Pmode, r_label);
1124 r_save_area = expand_normal (t_save_area);
1125 r_save_area = convert_memory_address (Pmode, r_save_area);
1126 /* Copy the address of the save location to a register just in case it was
1127 based on the frame pointer. */
1128 r_save_area = copy_to_reg (r_save_area);
1129 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1130 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1131 plus_constant (Pmode, r_save_area,
1132 GET_MODE_SIZE (Pmode)));
1134 crtl->has_nonlocal_goto = 1;
1136 #ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, as far as
     we know.  */
1138 if (HAVE_nonlocal_goto)
1139 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1140 else
1141 #endif
1143 r_label = copy_to_reg (r_label);
1145 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1146 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1148 /* Restore frame pointer for containing function. */
1149 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1150 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1152 /* USE of hard_frame_pointer_rtx added for consistency;
1153 not clear if really needed. */
1154 emit_use (hard_frame_pointer_rtx);
1155 emit_use (stack_pointer_rtx);
1157 /* If the architecture is using a GP register, we must
1158 conservatively assume that the target function makes use of it.
1159 The prologue of functions with nonlocal gotos must therefore
1160 initialize the GP register to the appropriate value, and we
1161 must then make sure that this value is live at the point
1162 of the jump. (Note that this doesn't necessarily apply
1163 to targets with a nonlocal_goto pattern; they are free
1164 to implement it in their own way. Note also that this is
1165 a no-op if the GP register is a global invariant.) */
1166 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1167 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1168 emit_use (pic_offset_table_rtx);
1170 emit_indirect_jump (r_label);
1173 /* Search backwards to the jump insn and mark it as a
1174 non-local goto. */
1175 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1177 if (JUMP_P (insn))
1179 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1180 break;
1182 else if (CALL_P (insn))
1183 break;
1186 return const0_rtx;
1189 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1190 (not all will be used on all machines) that was passed to __builtin_setjmp.
1191 It updates the stack pointer in that block to correspond to the current
1192 stack pointer. */
1194 static void
1195 expand_builtin_update_setjmp_buf (rtx buf_addr)
1197 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1198 rtx stack_save
1199 = gen_rtx_MEM (sa_mode,
1200 memory_address
1201 (sa_mode,
1202 plus_constant (Pmode, buf_addr,
1203 2 * GET_MODE_SIZE (Pmode))));
1205 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1208 /* Expand a call to __builtin_prefetch. For a target that does not support
1209 data prefetch, evaluate the memory address argument in case it has side
1210 effects. */
1212 static void
1213 expand_builtin_prefetch (tree exp)
1215 tree arg0, arg1, arg2;
1216 int nargs;
1217 rtx op0, op1, op2;
1219 if (!validate_arglist (exp, POINTER_TYPE, 0))
1220 return;
1222 arg0 = CALL_EXPR_ARG (exp, 0);
1224 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1225 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1226 locality). */
1227 nargs = call_expr_nargs (exp);
1228 if (nargs > 1)
1229 arg1 = CALL_EXPR_ARG (exp, 1);
1230 else
1231 arg1 = integer_zero_node;
1232 if (nargs > 2)
1233 arg2 = CALL_EXPR_ARG (exp, 2);
1234 else
1235 arg2 = integer_three_node;
1237 /* Argument 0 is an address. */
1238 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1240 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1241 if (TREE_CODE (arg1) != INTEGER_CST)
1243 error ("second argument to %<__builtin_prefetch%> must be a constant");
1244 arg1 = integer_zero_node;
1246 op1 = expand_normal (arg1);
1247 /* Argument 1 must be either zero or one. */
1248 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1250 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1251 " using zero");
1252 op1 = const0_rtx;
1255 /* Argument 2 (locality) must be a compile-time constant int. */
1256 if (TREE_CODE (arg2) != INTEGER_CST)
1258 error ("third argument to %<__builtin_prefetch%> must be a constant");
1259 arg2 = integer_zero_node;
1261 op2 = expand_normal (arg2);
1262 /* Argument 2 must be 0, 1, 2, or 3. */
1263 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1265 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1266 op2 = const0_rtx;
1269 #ifdef HAVE_prefetch
1270 if (HAVE_prefetch)
1272 struct expand_operand ops[3];
1274 create_address_operand (&ops[0], op0);
1275 create_integer_operand (&ops[1], INTVAL (op1));
1276 create_integer_operand (&ops[2], INTVAL (op2));
1277 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1278 return;
1280 #endif
1282 /* Don't do anything with direct references to volatile memory, but
1283 generate code to handle other side effects. */
1284 if (!MEM_P (op0) && side_effects_p (op0))
1285 emit_insn (op0);
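/* For reference, at the source level __builtin_prefetch (p) is
   equivalent to __builtin_prefetch (p, 0, 3): a read prefetch with
   maximal temporal locality, while __builtin_prefetch (p, 1, 0) asks
   for a write prefetch with no temporal locality.  On targets without
   a prefetch pattern either form reduces to evaluating P for its side
   effects.  */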
1288 /* Get a MEM rtx for expression EXP which is the address of an operand
1289 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1290 the maximum length of the block of memory that might be accessed or
1291 NULL if unknown. */
1293 static rtx
1294 get_memory_rtx (tree exp, tree len)
1296 tree orig_exp = exp;
1297 rtx addr, mem;
  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
1301 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1302 exp = TREE_OPERAND (exp, 0);
1304 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1305 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1307 /* Get an expression we can use to find the attributes to assign to MEM.
1308 First remove any nops. */
1309 while (CONVERT_EXPR_P (exp)
1310 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1311 exp = TREE_OPERAND (exp, 0);
  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
1315 exp = fold_build2 (MEM_REF,
1316 build_array_type (char_type_node,
1317 build_range_type (sizetype,
1318 size_one_node, len)),
1319 exp, build_int_cst (ptr_type_node, 0));
1321 /* If the MEM_REF has no acceptable address, try to get the base object
1322 from the original address we got, and build an all-aliasing
1323 unknown-sized access to that one. */
1324 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1325 set_mem_attributes (mem, exp, 0);
1326 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1327 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1328 0))))
1330 exp = build_fold_addr_expr (exp);
1331 exp = fold_build2 (MEM_REF,
1332 build_array_type (char_type_node,
1333 build_range_type (sizetype,
1334 size_zero_node,
1335 NULL)),
1336 exp, build_int_cst (ptr_type_node, 0));
1337 set_mem_attributes (mem, exp, 0);
1339 set_mem_alias_set (mem, 0);
1340 return mem;
1343 /* Built-in functions to perform an untyped call and return. */
1345 #define apply_args_mode \
1346 (this_target_builtins->x_apply_args_mode)
1347 #define apply_result_mode \
1348 (this_target_builtins->x_apply_result_mode)
1350 /* Return the size required for the block returned by __builtin_apply_args,
1351 and initialize apply_args_mode. */
1353 static int
1354 apply_args_size (void)
1356 static int size = -1;
1357 int align;
1358 unsigned int regno;
1359 enum machine_mode mode;
1361 /* The values computed by this function never change. */
1362 if (size < 0)
1364 /* The first value is the incoming arg-pointer. */
1365 size = GET_MODE_SIZE (Pmode);
1367 /* The second value is the structure value address unless this is
1368 passed as an "invisible" first argument. */
1369 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1370 size += GET_MODE_SIZE (Pmode);
1372 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1373 if (FUNCTION_ARG_REGNO_P (regno))
1375 mode = targetm.calls.get_raw_arg_mode (regno);
1377 gcc_assert (mode != VOIDmode);
1379 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1380 if (size % align != 0)
1381 size = CEIL (size, align) * align;
1382 size += GET_MODE_SIZE (mode);
1383 apply_args_mode[regno] = mode;
1385 else
1387 apply_args_mode[regno] = VOIDmode;
1390 return size;
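/* For example, on a typical 64-bit target the block laid out above
   starts with the Pmode arg-pointer (8 bytes), optionally a Pmode
   structure-value address, and then each candidate argument register,
   each placed at the next multiple of its mode's alignment.  */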
1393 /* Return the size required for the block returned by __builtin_apply,
1394 and initialize apply_result_mode. */
1396 static int
1397 apply_result_size (void)
1399 static int size = -1;
1400 int align, regno;
1401 enum machine_mode mode;
1403 /* The values computed by this function never change. */
1404 if (size < 0)
1406 size = 0;
1408 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1409 if (targetm.calls.function_value_regno_p (regno))
1411 mode = targetm.calls.get_raw_result_mode (regno);
1413 gcc_assert (mode != VOIDmode);
1415 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1416 if (size % align != 0)
1417 size = CEIL (size, align) * align;
1418 size += GET_MODE_SIZE (mode);
1419 apply_result_mode[regno] = mode;
1421 else
1422 apply_result_mode[regno] = VOIDmode;
1424 /* Allow targets that use untyped_call and untyped_return to override
1425 the size so that machine-specific information can be stored here. */
1426 #ifdef APPLY_RESULT_SIZE
1427 size = APPLY_RESULT_SIZE;
1428 #endif
1430 return size;
1433 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1434 /* Create a vector describing the result block RESULT. If SAVEP is true,
1435 the result block is used to save the values; otherwise it is used to
1436 restore the values. */
1438 static rtx
1439 result_vector (int savep, rtx result)
1441 int regno, size, align, nelts;
1442 enum machine_mode mode;
1443 rtx reg, mem;
1444 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1446 size = nelts = 0;
1447 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1448 if ((mode = apply_result_mode[regno]) != VOIDmode)
1450 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1451 if (size % align != 0)
1452 size = CEIL (size, align) * align;
1453 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1454 mem = adjust_address (result, mode, size);
1455 savevec[nelts++] = (savep
1456 ? gen_rtx_SET (VOIDmode, mem, reg)
1457 : gen_rtx_SET (VOIDmode, reg, mem));
1458 size += GET_MODE_SIZE (mode);
1460 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1462 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1464 /* Save the state required to perform an untyped call with the same
1465 arguments as were passed to the current function. */
1467 static rtx
1468 expand_builtin_apply_args_1 (void)
1470 rtx registers, tem;
1471 int size, align, regno;
1472 enum machine_mode mode;
1473 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1475 /* Create a block where the arg-pointer, structure value address,
1476 and argument registers can be saved. */
1477 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1479 /* Walk past the arg-pointer and structure value address. */
1480 size = GET_MODE_SIZE (Pmode);
1481 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1482 size += GET_MODE_SIZE (Pmode);
1484 /* Save each register used in calling a function to the block. */
1485 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1486 if ((mode = apply_args_mode[regno]) != VOIDmode)
1488 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1489 if (size % align != 0)
1490 size = CEIL (size, align) * align;
1492 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1494 emit_move_insn (adjust_address (registers, mode, size), tem);
1495 size += GET_MODE_SIZE (mode);
1498 /* Save the arg pointer to the block. */
1499 tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed it to us, not
     as we might have pretended it was passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
                     NULL_RTX);
#endif
1508 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1510 size = GET_MODE_SIZE (Pmode);
1512 /* Save the structure value address unless this is passed as an
1513 "invisible" first argument. */
1514 if (struct_incoming_value)
1516 emit_move_insn (adjust_address (registers, Pmode, size),
1517 copy_to_reg (struct_incoming_value));
1518 size += GET_MODE_SIZE (Pmode);
1521 /* Return the address of the block. */
1522 return copy_addr_to_reg (XEXP (registers, 0));
/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */
1532 static rtx
1533 expand_builtin_apply_args (void)
1535 /* Don't do __builtin_apply_args more than once in a function.
1536 Save the result of the first call and reuse it. */
1537 if (apply_args_value != 0)
1538 return apply_args_value;
1540 /* When this function is called, it means that registers must be
1541 saved on entry to this function. So we migrate the
1542 call to the first insn of this function. */
1543 rtx temp;
1544 rtx seq;
1546 start_sequence ();
1547 temp = expand_builtin_apply_args_1 ();
1548 seq = get_insns ();
1549 end_sequence ();
1551 apply_args_value = temp;
  /* Put the insns after the NOTE that starts the function.
     If this is inside a start_sequence, make the outer-level insn
     chain current, so the code is placed at the start of the
     function.  If internal_arg_pointer is a non-virtual pseudo,
     it needs to be placed after the insn that initializes
     that pseudo.  */
1559 push_topmost_sequence ();
1560 if (REG_P (crtl->args.internal_arg_pointer)
1561 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1562 emit_insn_before (seq, parm_birth_insn);
1563 else
1564 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1565 pop_topmost_sequence ();
1566 return temp;
1570 /* Perform an untyped call and save the state required to perform an
1571 untyped return of whatever value was returned by the given function. */
1573 static rtx
1574 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1576 int size, align, regno;
1577 enum machine_mode mode;
1578 rtx incoming_args, result, reg, dest, src;
1579 rtx_call_insn *call_insn;
1580 rtx old_stack_level = 0;
1581 rtx call_fusage = 0;
1582 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1584 arguments = convert_memory_address (Pmode, arguments);
1586 /* Create a block where the return registers can be saved. */
1587 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1589 /* Fetch the arg pointer from the ARGUMENTS block. */
1590 incoming_args = gen_reg_rtx (Pmode);
1591 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1592 #ifndef STACK_GROWS_DOWNWARD
1593 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1594 incoming_args, 0, OPTAB_LIB_WIDEN);
1595 #endif
1597 /* Push a new argument block and copy the arguments. Do not allow
1598 the (potential) memcpy call below to interfere with our stack
1599 manipulations. */
1600 do_pending_stack_adjust ();
1601 NO_DEFER_POP;
1603 /* Save the stack with nonlocal if available. */
1604 #ifdef HAVE_save_stack_nonlocal
1605 if (HAVE_save_stack_nonlocal)
1606 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1607 else
1608 #endif
1609 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1611 /* Allocate a block of memory onto the stack and copy the memory
1612 arguments to the outgoing arguments address. We can pass TRUE
1613 as the 4th argument because we just saved the stack pointer
1614 and will restore it right after the call. */
1615 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1617 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1618 may have already set current_function_calls_alloca to true.
1619 current_function_calls_alloca won't be set if argsize is zero,
1620 so we have to guarantee need_drap is true here. */
1621 if (SUPPORTS_STACK_ALIGNMENT)
1622 crtl->need_drap = true;
1624 dest = virtual_outgoing_args_rtx;
1625 #ifndef STACK_GROWS_DOWNWARD
1626 if (CONST_INT_P (argsize))
1627 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1628 else
1629 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1630 #endif
1631 dest = gen_rtx_MEM (BLKmode, dest);
1632 set_mem_align (dest, PARM_BOUNDARY);
1633 src = gen_rtx_MEM (BLKmode, incoming_args);
1634 set_mem_align (src, PARM_BOUNDARY);
1635 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1637 /* Refer to the argument block. */
1638 apply_args_size ();
1639 arguments = gen_rtx_MEM (BLKmode, arguments);
1640 set_mem_align (arguments, PARM_BOUNDARY);
1642 /* Walk past the arg-pointer and structure value address. */
1643 size = GET_MODE_SIZE (Pmode);
1644 if (struct_value)
1645 size += GET_MODE_SIZE (Pmode);
1647 /* Restore each of the registers previously saved. Make USE insns
1648 for each of these registers for use in making the call. */
1649 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1650 if ((mode = apply_args_mode[regno]) != VOIDmode)
1652 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1653 if (size % align != 0)
1654 size = CEIL (size, align) * align;
1655 reg = gen_rtx_REG (mode, regno);
1656 emit_move_insn (reg, adjust_address (arguments, mode, size));
1657 use_reg (&call_fusage, reg);
1658 size += GET_MODE_SIZE (mode);
1661 /* Restore the structure value address unless this is passed as an
1662 "invisible" first argument. */
1663 size = GET_MODE_SIZE (Pmode);
1664 if (struct_value)
1666 rtx value = gen_reg_rtx (Pmode);
1667 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1668 emit_move_insn (struct_value, value);
1669 if (REG_P (struct_value))
1670 use_reg (&call_fusage, struct_value);
1671 size += GET_MODE_SIZE (Pmode);
1674 /* All arguments and registers used for the call are set up by now! */
1675 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1677 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1678 and we don't want to load it into a register as an optimization,
1679 because prepare_call_address already did it if it should be done. */
1680 if (GET_CODE (function) != SYMBOL_REF)
1681 function = memory_address (FUNCTION_MODE, function);
1683 /* Generate the actual call instruction and save the return value. */
1684 #ifdef HAVE_untyped_call
1685 if (HAVE_untyped_call)
1686 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1687 result, result_vector (1, result)));
1688 else
1689 #endif
1690 #ifdef HAVE_call_value
1691 if (HAVE_call_value)
1693 rtx valreg = 0;
1695 /* Locate the unique return register. It is not possible to
1696 express a call that sets more than one return register using
1697 call_value; use untyped_call for that. In fact, untyped_call
1698 only needs to save the return registers in the given block. */
1699 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1700 if ((mode = apply_result_mode[regno]) != VOIDmode)
1702 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1704 valreg = gen_rtx_REG (mode, regno);
1707 emit_call_insn (GEN_CALL_VALUE (valreg,
1708 gen_rtx_MEM (FUNCTION_MODE, function),
1709 const0_rtx, NULL_RTX, const0_rtx));
1711 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1713 else
1714 #endif
1715 gcc_unreachable ();
1717 /* Find the CALL insn we just emitted, and attach the register usage
1718 information. */
1719 call_insn = last_call_insn ();
1720 add_function_usage_to (call_insn, call_fusage);
1722 /* Restore the stack. */
1723 #ifdef HAVE_save_stack_nonlocal
1724 if (HAVE_save_stack_nonlocal)
1725 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1726 else
1727 #endif
1728 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1729 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1731 OK_DEFER_POP;
1733 /* Return the address of the result block. */
1734 result = copy_addr_to_reg (XEXP (result, 0));
1735 return convert_memory_address (ptr_mode, result);
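/* A usage sketch (not code from this file): the extension expanded
   above is typically used together with __builtin_apply_args and
   __builtin_return to forward a call, roughly

       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);

   where target_fn is a hypothetical callee and 64 is a caller-chosen
   upper bound on the size in bytes of the stack argument data.  */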
1738 /* Perform an untyped return. */
1740 static void
1741 expand_builtin_return (rtx result)
1743 int size, align, regno;
1744 enum machine_mode mode;
1745 rtx reg;
1746 rtx call_fusage = 0;
1748 result = convert_memory_address (Pmode, result);
1750 apply_result_size ();
1751 result = gen_rtx_MEM (BLKmode, result);
1753 #ifdef HAVE_untyped_return
1754 if (HAVE_untyped_return)
1756 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1757 emit_barrier ();
1758 return;
1760 #endif
1762 /* Restore the return value and note that each value is used. */
1763 size = 0;
1764 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1765 if ((mode = apply_result_mode[regno]) != VOIDmode)
1767 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1768 if (size % align != 0)
1769 size = CEIL (size, align) * align;
1770 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1771 emit_move_insn (reg, adjust_address (result, mode, size));
1773 push_to_sequence (call_fusage);
1774 emit_use (reg);
1775 call_fusage = get_insns ();
1776 end_sequence ();
1777 size += GET_MODE_SIZE (mode);
1780 /* Put the USE insns before the return. */
1781 emit_insn (call_fusage);
1783 /* Return whatever value was restored by jumping directly to the end
1784 of the function. */
1785 expand_naked_return ();
1788 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1790 static enum type_class
1791 type_to_class (tree type)
1793 switch (TREE_CODE (type))
1795 case VOID_TYPE: return void_type_class;
1796 case INTEGER_TYPE: return integer_type_class;
1797 case ENUMERAL_TYPE: return enumeral_type_class;
1798 case BOOLEAN_TYPE: return boolean_type_class;
1799 case POINTER_TYPE: return pointer_type_class;
1800 case REFERENCE_TYPE: return reference_type_class;
1801 case OFFSET_TYPE: return offset_type_class;
1802 case REAL_TYPE: return real_type_class;
1803 case COMPLEX_TYPE: return complex_type_class;
1804 case FUNCTION_TYPE: return function_type_class;
1805 case METHOD_TYPE: return method_type_class;
1806 case RECORD_TYPE: return record_type_class;
1807 case UNION_TYPE:
1808 case QUAL_UNION_TYPE: return union_type_class;
1809 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1810 ? string_type_class : array_type_class);
1811 case LANG_TYPE: return lang_type_class;
1812 default: return no_type_class;
1816 /* Expand a call EXP to __builtin_classify_type. */
1818 static rtx
1819 expand_builtin_classify_type (tree exp)
1821 if (call_expr_nargs (exp))
1822 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1823 return GEN_INT (no_type_class);
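/* For example, __builtin_classify_type (1.0) has a double argument and
   so folds to the integer value of real_type_class, while a call with
   no arguments yields no_type_class (a sketch of the observable
   behaviour).  */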
1826 /* This helper macro, meant to be used in mathfn_built_in below,
1827 determines which among a set of three builtin math functions is
1828 appropriate for a given type mode. The `F' and `L' cases are
1829 automatically generated from the `double' case. */
1830 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1831 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1832 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1833 fcodel = BUILT_IN_MATHFN##L ; break;
1834 /* Similar to above, but appends _R after any F/L suffix. */
1835 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1836 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1837 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1838 fcodel = BUILT_IN_MATHFN##L_R ; break;
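/* As an illustration, CASE_MATHFN (BUILT_IN_SIN) expands to roughly

       case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
         fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
         fcodel = BUILT_IN_SINL; break;

   so a single macro use covers the double, float and long double
   variants of each function.  */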
1840 /* Return the mathematical function equivalent to FN but operating directly
1841 on TYPE, if available. If IMPLICIT_P is true use the implicit builtin
1842 declaration, otherwise use the explicit declaration. If we can't do the
1843 conversion, return zero. */
1845 static tree
1846 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1848 enum built_in_function fcode, fcodef, fcodel, fcode2;
1850 switch (fn)
1852 CASE_MATHFN (BUILT_IN_ACOS)
1853 CASE_MATHFN (BUILT_IN_ACOSH)
1854 CASE_MATHFN (BUILT_IN_ASIN)
1855 CASE_MATHFN (BUILT_IN_ASINH)
1856 CASE_MATHFN (BUILT_IN_ATAN)
1857 CASE_MATHFN (BUILT_IN_ATAN2)
1858 CASE_MATHFN (BUILT_IN_ATANH)
1859 CASE_MATHFN (BUILT_IN_CBRT)
1860 CASE_MATHFN (BUILT_IN_CEIL)
1861 CASE_MATHFN (BUILT_IN_CEXPI)
1862 CASE_MATHFN (BUILT_IN_COPYSIGN)
1863 CASE_MATHFN (BUILT_IN_COS)
1864 CASE_MATHFN (BUILT_IN_COSH)
1865 CASE_MATHFN (BUILT_IN_DREM)
1866 CASE_MATHFN (BUILT_IN_ERF)
1867 CASE_MATHFN (BUILT_IN_ERFC)
1868 CASE_MATHFN (BUILT_IN_EXP)
1869 CASE_MATHFN (BUILT_IN_EXP10)
1870 CASE_MATHFN (BUILT_IN_EXP2)
1871 CASE_MATHFN (BUILT_IN_EXPM1)
1872 CASE_MATHFN (BUILT_IN_FABS)
1873 CASE_MATHFN (BUILT_IN_FDIM)
1874 CASE_MATHFN (BUILT_IN_FLOOR)
1875 CASE_MATHFN (BUILT_IN_FMA)
1876 CASE_MATHFN (BUILT_IN_FMAX)
1877 CASE_MATHFN (BUILT_IN_FMIN)
1878 CASE_MATHFN (BUILT_IN_FMOD)
1879 CASE_MATHFN (BUILT_IN_FREXP)
1880 CASE_MATHFN (BUILT_IN_GAMMA)
1881 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1882 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1883 CASE_MATHFN (BUILT_IN_HYPOT)
1884 CASE_MATHFN (BUILT_IN_ILOGB)
1885 CASE_MATHFN (BUILT_IN_ICEIL)
1886 CASE_MATHFN (BUILT_IN_IFLOOR)
1887 CASE_MATHFN (BUILT_IN_INF)
1888 CASE_MATHFN (BUILT_IN_IRINT)
1889 CASE_MATHFN (BUILT_IN_IROUND)
1890 CASE_MATHFN (BUILT_IN_ISINF)
1891 CASE_MATHFN (BUILT_IN_J0)
1892 CASE_MATHFN (BUILT_IN_J1)
1893 CASE_MATHFN (BUILT_IN_JN)
1894 CASE_MATHFN (BUILT_IN_LCEIL)
1895 CASE_MATHFN (BUILT_IN_LDEXP)
1896 CASE_MATHFN (BUILT_IN_LFLOOR)
1897 CASE_MATHFN (BUILT_IN_LGAMMA)
1898 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1899 CASE_MATHFN (BUILT_IN_LLCEIL)
1900 CASE_MATHFN (BUILT_IN_LLFLOOR)
1901 CASE_MATHFN (BUILT_IN_LLRINT)
1902 CASE_MATHFN (BUILT_IN_LLROUND)
1903 CASE_MATHFN (BUILT_IN_LOG)
1904 CASE_MATHFN (BUILT_IN_LOG10)
1905 CASE_MATHFN (BUILT_IN_LOG1P)
1906 CASE_MATHFN (BUILT_IN_LOG2)
1907 CASE_MATHFN (BUILT_IN_LOGB)
1908 CASE_MATHFN (BUILT_IN_LRINT)
1909 CASE_MATHFN (BUILT_IN_LROUND)
1910 CASE_MATHFN (BUILT_IN_MODF)
1911 CASE_MATHFN (BUILT_IN_NAN)
1912 CASE_MATHFN (BUILT_IN_NANS)
1913 CASE_MATHFN (BUILT_IN_NEARBYINT)
1914 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1915 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1916 CASE_MATHFN (BUILT_IN_POW)
1917 CASE_MATHFN (BUILT_IN_POWI)
1918 CASE_MATHFN (BUILT_IN_POW10)
1919 CASE_MATHFN (BUILT_IN_REMAINDER)
1920 CASE_MATHFN (BUILT_IN_REMQUO)
1921 CASE_MATHFN (BUILT_IN_RINT)
1922 CASE_MATHFN (BUILT_IN_ROUND)
1923 CASE_MATHFN (BUILT_IN_SCALB)
1924 CASE_MATHFN (BUILT_IN_SCALBLN)
1925 CASE_MATHFN (BUILT_IN_SCALBN)
1926 CASE_MATHFN (BUILT_IN_SIGNBIT)
1927 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1928 CASE_MATHFN (BUILT_IN_SIN)
1929 CASE_MATHFN (BUILT_IN_SINCOS)
1930 CASE_MATHFN (BUILT_IN_SINH)
1931 CASE_MATHFN (BUILT_IN_SQRT)
1932 CASE_MATHFN (BUILT_IN_TAN)
1933 CASE_MATHFN (BUILT_IN_TANH)
1934 CASE_MATHFN (BUILT_IN_TGAMMA)
1935 CASE_MATHFN (BUILT_IN_TRUNC)
1936 CASE_MATHFN (BUILT_IN_Y0)
1937 CASE_MATHFN (BUILT_IN_Y1)
1938 CASE_MATHFN (BUILT_IN_YN)
1940 default:
1941 return NULL_TREE;
1944 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1945 fcode2 = fcode;
1946 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1947 fcode2 = fcodef;
1948 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1949 fcode2 = fcodel;
1950 else
1951 return NULL_TREE;
1953 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1954 return NULL_TREE;
1956 return builtin_decl_explicit (fcode2);
1959 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1961 tree
1962 mathfn_built_in (tree type, enum built_in_function fn)
1964 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
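/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) yields
   the declaration of __builtin_sinf, or NULL_TREE when the implicit
   declaration is not available (a sketch of typical use).  */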
1967 /* If errno must be maintained, expand the RTL to check if the result,
1968 TARGET, of a built-in function call, EXP, is NaN, and if so set
1969 errno to EDOM. */
1971 static void
1972 expand_errno_check (tree exp, rtx target)
1974 rtx_code_label *lab = gen_label_rtx ();
1976 /* Test the result; if it is NaN, set errno=EDOM because
1977 the argument was not in the domain. */
1978 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1979 NULL_RTX, NULL_RTX, lab,
1980 /* The jump is very likely. */
1981 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1983 #ifdef TARGET_EDOM
1984 /* If this built-in doesn't throw an exception, set errno directly. */
1985 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1987 #ifdef GEN_ERRNO_RTX
1988 rtx errno_rtx = GEN_ERRNO_RTX;
1989 #else
1990 rtx errno_rtx
1991 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1992 #endif
1993 emit_move_insn (errno_rtx,
1994 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1995 emit_label (lab);
1996 return;
1998 #endif
2000 /* Make sure the library call isn't expanded as a tail call. */
2001 CALL_EXPR_TAILCALL (exp) = 0;
2003 /* We can't set errno=EDOM directly; let the library call do it.
2004 Pop the arguments right away in case the call gets deleted. */
2005 NO_DEFER_POP;
2006 expand_call (exp, target, 0);
2007 OK_DEFER_POP;
2008 emit_label (lab);
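/* The self-comparison above exploits the fact that a NaN compares
   unequal to everything, including itself; the emitted code behaves
   roughly like the C sketch

       if (! (target == target))
         errno = EDOM;

   where errno is set only when TARGET is a NaN.  */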
2011 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2012 Return NULL_RTX if a normal call should be emitted rather than expanding
2013 the function in-line. EXP is the expression that is a call to the builtin
2014 function; if convenient, the result should be placed in TARGET.
2015 SUBTARGET may be used as the target for computing one of EXP's operands. */
2017 static rtx
2018 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2020 optab builtin_optab;
2021 rtx op0;
2022 rtx_insn *insns;
2023 tree fndecl = get_callee_fndecl (exp);
2024 enum machine_mode mode;
2025 bool errno_set = false;
2026 bool try_widening = false;
2027 tree arg;
2029 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2030 return NULL_RTX;
2032 arg = CALL_EXPR_ARG (exp, 0);
2034 switch (DECL_FUNCTION_CODE (fndecl))
2036 CASE_FLT_FN (BUILT_IN_SQRT):
2037 errno_set = ! tree_expr_nonnegative_p (arg);
2038 try_widening = true;
2039 builtin_optab = sqrt_optab;
2040 break;
2041 CASE_FLT_FN (BUILT_IN_EXP):
2042 errno_set = true; builtin_optab = exp_optab; break;
2043 CASE_FLT_FN (BUILT_IN_EXP10):
2044 CASE_FLT_FN (BUILT_IN_POW10):
2045 errno_set = true; builtin_optab = exp10_optab; break;
2046 CASE_FLT_FN (BUILT_IN_EXP2):
2047 errno_set = true; builtin_optab = exp2_optab; break;
2048 CASE_FLT_FN (BUILT_IN_EXPM1):
2049 errno_set = true; builtin_optab = expm1_optab; break;
2050 CASE_FLT_FN (BUILT_IN_LOGB):
2051 errno_set = true; builtin_optab = logb_optab; break;
2052 CASE_FLT_FN (BUILT_IN_LOG):
2053 errno_set = true; builtin_optab = log_optab; break;
2054 CASE_FLT_FN (BUILT_IN_LOG10):
2055 errno_set = true; builtin_optab = log10_optab; break;
2056 CASE_FLT_FN (BUILT_IN_LOG2):
2057 errno_set = true; builtin_optab = log2_optab; break;
2058 CASE_FLT_FN (BUILT_IN_LOG1P):
2059 errno_set = true; builtin_optab = log1p_optab; break;
2060 CASE_FLT_FN (BUILT_IN_ASIN):
2061 builtin_optab = asin_optab; break;
2062 CASE_FLT_FN (BUILT_IN_ACOS):
2063 builtin_optab = acos_optab; break;
2064 CASE_FLT_FN (BUILT_IN_TAN):
2065 builtin_optab = tan_optab; break;
2066 CASE_FLT_FN (BUILT_IN_ATAN):
2067 builtin_optab = atan_optab; break;
2068 CASE_FLT_FN (BUILT_IN_FLOOR):
2069 builtin_optab = floor_optab; break;
2070 CASE_FLT_FN (BUILT_IN_CEIL):
2071 builtin_optab = ceil_optab; break;
2072 CASE_FLT_FN (BUILT_IN_TRUNC):
2073 builtin_optab = btrunc_optab; break;
2074 CASE_FLT_FN (BUILT_IN_ROUND):
2075 builtin_optab = round_optab; break;
2076 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2077 builtin_optab = nearbyint_optab;
2078 if (flag_trapping_math)
2079 break;
2080 /* Else fallthrough and expand as rint. */
2081 CASE_FLT_FN (BUILT_IN_RINT):
2082 builtin_optab = rint_optab; break;
2083 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2084 builtin_optab = significand_optab; break;
2085 default:
2086 gcc_unreachable ();
2089 /* Make a suitable register to place result in. */
2090 mode = TYPE_MODE (TREE_TYPE (exp));
2092 if (! flag_errno_math || ! HONOR_NANS (mode))
2093 errno_set = false;
2095 /* Before working hard, check whether the instruction is available, but try
2096 to widen the mode for specific operations. */
2097 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2098 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2099 && (!errno_set || !optimize_insn_for_size_p ()))
2101 rtx result = gen_reg_rtx (mode);
2103 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2104 need to expand the argument again. This way, we will not perform
2105 side-effects more than once. */
2106 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2108 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2110 start_sequence ();
2112 /* Compute into RESULT.
2113 Set RESULT to wherever the result comes back. */
2114 result = expand_unop (mode, builtin_optab, op0, result, 0);
2116 if (result != 0)
2118 if (errno_set)
2119 expand_errno_check (exp, result);
2121 /* Output the entire sequence. */
2122 insns = get_insns ();
2123 end_sequence ();
2124 emit_insn (insns);
2125 return result;
2128 /* If we were unable to expand via the builtin, stop the sequence
2129 (without outputting the insns) and call the library function
2130 with the stabilized argument list. */
2131 end_sequence ();
2134 return expand_call (exp, target, target == const0_rtx);
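/* The builtin_save_expr wrapping above matters on the fallback path:
   for a call such as sqrt (f ()), the argument must not be evaluated a
   second time when expand_call re-expands the argument list, or f ()
   would run twice (a sketch of the hazard being avoided, with f a
   hypothetical function with side effects).  */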
2137 /* Expand a call to the builtin binary math functions (pow and atan2).
2138 Return NULL_RTX if a normal call should be emitted rather than expanding the
2139 function in-line. EXP is the expression that is a call to the builtin
2140 function; if convenient, the result should be placed in TARGET.
2141 SUBTARGET may be used as the target for computing one of EXP's
2142 operands. */
2144 static rtx
2145 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2147 optab builtin_optab;
2148 rtx op0, op1, result;
2149 rtx_insn *insns;
2150 int op1_type = REAL_TYPE;
2151 tree fndecl = get_callee_fndecl (exp);
2152 tree arg0, arg1;
2153 enum machine_mode mode;
2154 bool errno_set = true;
2156 switch (DECL_FUNCTION_CODE (fndecl))
2158 CASE_FLT_FN (BUILT_IN_SCALBN):
2159 CASE_FLT_FN (BUILT_IN_SCALBLN):
2160 CASE_FLT_FN (BUILT_IN_LDEXP):
2161 op1_type = INTEGER_TYPE;
2162 default:
2163 break;
2166 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2167 return NULL_RTX;
2169 arg0 = CALL_EXPR_ARG (exp, 0);
2170 arg1 = CALL_EXPR_ARG (exp, 1);
2172 switch (DECL_FUNCTION_CODE (fndecl))
2174 CASE_FLT_FN (BUILT_IN_POW):
2175 builtin_optab = pow_optab; break;
2176 CASE_FLT_FN (BUILT_IN_ATAN2):
2177 builtin_optab = atan2_optab; break;
2178 CASE_FLT_FN (BUILT_IN_SCALB):
2179 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2180 return 0;
2181 builtin_optab = scalb_optab; break;
2182 CASE_FLT_FN (BUILT_IN_SCALBN):
2183 CASE_FLT_FN (BUILT_IN_SCALBLN):
2184 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2185 return 0;
2186 /* Fall through... */
2187 CASE_FLT_FN (BUILT_IN_LDEXP):
2188 builtin_optab = ldexp_optab; break;
2189 CASE_FLT_FN (BUILT_IN_FMOD):
2190 builtin_optab = fmod_optab; break;
2191 CASE_FLT_FN (BUILT_IN_REMAINDER):
2192 CASE_FLT_FN (BUILT_IN_DREM):
2193 builtin_optab = remainder_optab; break;
2194 default:
2195 gcc_unreachable ();
2198 /* Make a suitable register to place result in. */
2199 mode = TYPE_MODE (TREE_TYPE (exp));
2201 /* Before working hard, check whether the instruction is available. */
2202 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2203 return NULL_RTX;
2205 result = gen_reg_rtx (mode);
2207 if (! flag_errno_math || ! HONOR_NANS (mode))
2208 errno_set = false;
2210 if (errno_set && optimize_insn_for_size_p ())
2211 return 0;
2213 /* Always stabilize the argument list. */
2214 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2215 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2217 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2218 op1 = expand_normal (arg1);
2220 start_sequence ();
2222 /* Compute into RESULT.
2223 Set RESULT to wherever the result comes back. */
2224 result = expand_binop (mode, builtin_optab, op0, op1,
2225 result, 0, OPTAB_DIRECT);
2227 /* If we were unable to expand via the builtin, stop the sequence
2228 (without outputting the insns) and call the library function
2229 with the stabilized argument list. */
2230 if (result == 0)
2232 end_sequence ();
2233 return expand_call (exp, target, target == const0_rtx);
2236 if (errno_set)
2237 expand_errno_check (exp, result);
2239 /* Output the entire sequence. */
2240 insns = get_insns ();
2241 end_sequence ();
2242 emit_insn (insns);
2244 return result;
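/* The radix checks above reflect the C99 definitions: ldexp (x, n)
   computes x * 2**n while scalbn (x, n) computes x * FLT_RADIX**n, so
   lowering scalb/scalbn/scalbln to ldexp_optab is only valid when the
   radix b of the floating point format is 2 (a summary of the
   constraint, assuming C99 semantics).  */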
2247 /* Expand a call to the builtin ternary math functions (fma).
2248 Return NULL_RTX if a normal call should be emitted rather than expanding the
2249 function in-line. EXP is the expression that is a call to the builtin
2250 function; if convenient, the result should be placed in TARGET.
2251 SUBTARGET may be used as the target for computing one of EXP's
2252 operands. */
2254 static rtx
2255 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2257 optab builtin_optab;
2258 rtx op0, op1, op2, result;
2259 rtx_insn *insns;
2260 tree fndecl = get_callee_fndecl (exp);
2261 tree arg0, arg1, arg2;
2262 enum machine_mode mode;
2264 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2265 return NULL_RTX;
2267 arg0 = CALL_EXPR_ARG (exp, 0);
2268 arg1 = CALL_EXPR_ARG (exp, 1);
2269 arg2 = CALL_EXPR_ARG (exp, 2);
2271 switch (DECL_FUNCTION_CODE (fndecl))
2273 CASE_FLT_FN (BUILT_IN_FMA):
2274 builtin_optab = fma_optab; break;
2275 default:
2276 gcc_unreachable ();
2279 /* Make a suitable register to place result in. */
2280 mode = TYPE_MODE (TREE_TYPE (exp));
2282 /* Before working hard, check whether the instruction is available. */
2283 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2284 return NULL_RTX;
2286 result = gen_reg_rtx (mode);
2288 /* Always stabilize the argument list. */
2289 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2290 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2291 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2293 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2294 op1 = expand_normal (arg1);
2295 op2 = expand_normal (arg2);
2297 start_sequence ();
2299 /* Compute into RESULT.
2300 Set RESULT to wherever the result comes back. */
2301 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2302 result, 0);
2304 /* If we were unable to expand via the builtin, stop the sequence
2305 (without outputting the insns) and call the library function
2306 with the stabilized argument list. */
2307 if (result == 0)
2309 end_sequence ();
2310 return expand_call (exp, target, target == const0_rtx);
2313 /* Output the entire sequence. */
2314 insns = get_insns ();
2315 end_sequence ();
2316 emit_insn (insns);
2318 return result;
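/* fma (x, y, z) computes x * y + z with a single rounding step, so it
   can only be expanded through fma_optab when the target has a genuine
   fused multiply-add; emitting a separate multiply and add instead
   would introduce a second rounding and change the result for some
   inputs (an explanatory note, assuming C99 fma semantics).  */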
2321 /* Expand a call to the builtin sin and cos math functions.
2322 Return NULL_RTX if a normal call should be emitted rather than expanding the
2323 function in-line. EXP is the expression that is a call to the builtin
2324 function; if convenient, the result should be placed in TARGET.
2325 SUBTARGET may be used as the target for computing one of EXP's
2326 operands. */
2328 static rtx
2329 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2331 optab builtin_optab;
2332 rtx op0;
2333 rtx_insn *insns;
2334 tree fndecl = get_callee_fndecl (exp);
2335 enum machine_mode mode;
2336 tree arg;
2338 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2339 return NULL_RTX;
2341 arg = CALL_EXPR_ARG (exp, 0);
2343 switch (DECL_FUNCTION_CODE (fndecl))
2345 CASE_FLT_FN (BUILT_IN_SIN):
2346 CASE_FLT_FN (BUILT_IN_COS):
2347 builtin_optab = sincos_optab; break;
2348 default:
2349 gcc_unreachable ();
2352 /* Make a suitable register to place result in. */
2353 mode = TYPE_MODE (TREE_TYPE (exp));
2355 /* Check if the sincos insn is available; if not, fall back
2356 to the sin or cos insn. */
2357 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2358 switch (DECL_FUNCTION_CODE (fndecl))
2360 CASE_FLT_FN (BUILT_IN_SIN):
2361 builtin_optab = sin_optab; break;
2362 CASE_FLT_FN (BUILT_IN_COS):
2363 builtin_optab = cos_optab; break;
2364 default:
2365 gcc_unreachable ();
2368 /* Before working hard, check whether the instruction is available. */
2369 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2371 rtx result = gen_reg_rtx (mode);
2373 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2374 need to expand the argument again. This way, we will not perform
2375 side-effects more than once. */
2376 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2378 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2380 start_sequence ();
2382 /* Compute into RESULT.
2383 Set RESULT to wherever the result comes back. */
2384 if (builtin_optab == sincos_optab)
2386 int ok;
2388 switch (DECL_FUNCTION_CODE (fndecl))
2390 CASE_FLT_FN (BUILT_IN_SIN):
2391 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2392 break;
2393 CASE_FLT_FN (BUILT_IN_COS):
2394 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2395 break;
2396 default:
2397 gcc_unreachable ();
2399 gcc_assert (ok);
2401 else
2402 result = expand_unop (mode, builtin_optab, op0, result, 0);
2404 if (result != 0)
2406 /* Output the entire sequence. */
2407 insns = get_insns ();
2408 end_sequence ();
2409 emit_insn (insns);
2410 return result;
2413 /* If we were unable to expand via the builtin, stop the sequence
2414 (without outputting the insns) and call the library function
2415 with the stabilized argument list. */
2416 end_sequence ();
2419 return expand_call (exp, target, target == const0_rtx);
2422 /* Given an interclass math builtin decl FNDECL and its argument ARG
2423 return an RTL instruction code that implements the functionality.
2424 If that isn't possible or available return CODE_FOR_nothing. */
2426 static enum insn_code
2427 interclass_mathfn_icode (tree arg, tree fndecl)
2429 bool errno_set = false;
2430 optab builtin_optab = unknown_optab;
2431 enum machine_mode mode;
2433 switch (DECL_FUNCTION_CODE (fndecl))
2435 CASE_FLT_FN (BUILT_IN_ILOGB):
2436 errno_set = true; builtin_optab = ilogb_optab; break;
2437 CASE_FLT_FN (BUILT_IN_ISINF):
2438 builtin_optab = isinf_optab; break;
2439 case BUILT_IN_ISNORMAL:
2440 case BUILT_IN_ISFINITE:
2441 CASE_FLT_FN (BUILT_IN_FINITE):
2442 case BUILT_IN_FINITED32:
2443 case BUILT_IN_FINITED64:
2444 case BUILT_IN_FINITED128:
2445 case BUILT_IN_ISINFD32:
2446 case BUILT_IN_ISINFD64:
2447 case BUILT_IN_ISINFD128:
2448 /* These builtins have no optabs (yet). */
2449 break;
2450 default:
2451 gcc_unreachable ();
2454 /* There's no easy way to detect the case we need to set EDOM. */
2455 if (flag_errno_math && errno_set)
2456 return CODE_FOR_nothing;
2458 /* Optab mode depends on the mode of the input argument. */
2459 mode = TYPE_MODE (TREE_TYPE (arg));
2461 if (builtin_optab)
2462 return optab_handler (builtin_optab, mode);
2463 return CODE_FOR_nothing;
2466 /* Expand a call to one of the builtin math functions that operate on a
2467 floating-point argument and produce an integer result (ilogb, isinf,
2468 isnan, etc.).
2469 Return 0 if a normal call should be emitted rather than expanding the
2470 function in-line. EXP is the expression that is a call to the builtin
2471 function; if convenient, the result should be placed in TARGET. */
2473 static rtx
2474 expand_builtin_interclass_mathfn (tree exp, rtx target)
2476 enum insn_code icode = CODE_FOR_nothing;
2477 rtx op0;
2478 tree fndecl = get_callee_fndecl (exp);
2479 enum machine_mode mode;
2480 tree arg;
2482 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2483 return NULL_RTX;
2485 arg = CALL_EXPR_ARG (exp, 0);
2486 icode = interclass_mathfn_icode (arg, fndecl);
2487 mode = TYPE_MODE (TREE_TYPE (arg));
2489 if (icode != CODE_FOR_nothing)
2491 struct expand_operand ops[1];
2492 rtx_insn *last = get_last_insn ();
2493 tree orig_arg = arg;
2495 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2496 need to expand the argument again. This way, we will not perform
2497 side-effects more than once. */
2498 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2500 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2502 if (mode != GET_MODE (op0))
2503 op0 = convert_to_mode (mode, op0, 0);
2505 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2506 if (maybe_legitimize_operands (icode, 0, 1, ops)
2507 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2508 return ops[0].value;
2510 delete_insns_since (last);
2511 CALL_EXPR_ARG (exp, 0) = orig_arg;
2514 return NULL_RTX;
2517 /* Expand a call to the builtin sincos math function.
2518 Return NULL_RTX if a normal call should be emitted rather than expanding the
2519 function in-line. EXP is the expression that is a call to the builtin
2520 function. */
2522 static rtx
2523 expand_builtin_sincos (tree exp)
2525 rtx op0, op1, op2, target1, target2;
2526 enum machine_mode mode;
2527 tree arg, sinp, cosp;
2528 int result;
2529 location_t loc = EXPR_LOCATION (exp);
2530 tree alias_type, alias_off;
2532 if (!validate_arglist (exp, REAL_TYPE,
2533 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2534 return NULL_RTX;
2536 arg = CALL_EXPR_ARG (exp, 0);
2537 sinp = CALL_EXPR_ARG (exp, 1);
2538 cosp = CALL_EXPR_ARG (exp, 2);
2540 /* Make a suitable register to place result in. */
2541 mode = TYPE_MODE (TREE_TYPE (arg));
2543 /* Check if sincos insn is available, otherwise emit the call. */
2544 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2545 return NULL_RTX;
2547 target1 = gen_reg_rtx (mode);
2548 target2 = gen_reg_rtx (mode);
2550 op0 = expand_normal (arg);
2551 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2552 alias_off = build_int_cst (alias_type, 0);
2553 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2554 sinp, alias_off));
2555 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2556 cosp, alias_off));
2558 /* Compute into target1 and target2.
2559 Set TARGET to wherever the result comes back. */
2560 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2561 gcc_assert (result);
2563 /* Move target1 and target2 to the memory locations indicated
2564 by op1 and op2. */
2565 emit_move_insn (op1, target1);
2566 emit_move_insn (op2, target2);
2568 return const0_rtx;
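/* A usage sketch of the case handled above:

       double s, c;
       sincos (x, &s, &c);

   With a sincos insn both results come back in registers from a single
   instruction and are then stored through the user's pointers;
   otherwise the call is left for the library.  */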
2571 /* Expand a call to the internal cexpi builtin to the sincos math function.
2572 EXP is the expression that is a call to the builtin function; if convenient,
2573 the result should be placed in TARGET. */
2575 static rtx
2576 expand_builtin_cexpi (tree exp, rtx target)
2578 tree fndecl = get_callee_fndecl (exp);
2579 tree arg, type;
2580 enum machine_mode mode;
2581 rtx op0, op1, op2;
2582 location_t loc = EXPR_LOCATION (exp);
2584 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2585 return NULL_RTX;
2587 arg = CALL_EXPR_ARG (exp, 0);
2588 type = TREE_TYPE (arg);
2589 mode = TYPE_MODE (TREE_TYPE (arg));
2591 /* Try expanding via a sincos optab, falling back to emitting a libcall
2592 to sincos or cexp. We are sure sincos or cexp exists because cexpi is
2593 only generated from sincos or cexp, or when either of them is available. */
2594 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2596 op1 = gen_reg_rtx (mode);
2597 op2 = gen_reg_rtx (mode);
2599 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2601 /* Compute into op1 and op2. */
2602 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2604 else if (targetm.libc_has_function (function_sincos))
2606 tree call, fn = NULL_TREE;
2607 tree top1, top2;
2608 rtx op1a, op2a;
2610 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2611 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2612 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2613 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2614 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2615 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2616 else
2617 gcc_unreachable ();
2619 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2620 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2621 op1a = copy_addr_to_reg (XEXP (op1, 0));
2622 op2a = copy_addr_to_reg (XEXP (op2, 0));
2623 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2624 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2626 /* Make sure not to fold the sincos call again. */
2627 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2628 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2629 call, 3, arg, top1, top2));
2631 else
2633 tree call, fn = NULL_TREE, narg;
2634 tree ctype = build_complex_type (type);
2636 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2637 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2638 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2639 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2640 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2641 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2642 else
2643 gcc_unreachable ();
2645 /* If we don't have a decl for cexp, create one. This is the
2646 friendliest fallback if the user calls __builtin_cexpi on a
2647 target without full C99 function support. */
2648 if (fn == NULL_TREE)
2650 tree fntype;
2651 const char *name = NULL;
2653 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2654 name = "cexpf";
2655 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2656 name = "cexp";
2657 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2658 name = "cexpl";
2660 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2661 fn = build_fn_decl (name, fntype);
2664 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2665 build_real (type, dconst0), arg);
2667 /* Make sure not to fold the cexp call again. */
2668 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2669 return expand_expr (build_call_nary (ctype, call, 1, narg),
2670 target, VOIDmode, EXPAND_NORMAL);
2673 /* Now build the proper return type. */
2674 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2675 make_tree (TREE_TYPE (arg), op2),
2676 make_tree (TREE_TYPE (arg), op1)),
2677 target, VOIDmode, EXPAND_NORMAL);
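/* All three strategies above implement the same identity,
   cexpi (x) == cos (x) + i*sin (x) == cexp (i*x): directly via the
   sincos optab, via a sincos libcall through stack temporaries, or by
   rewriting the call as cexp (0 + i*x) (a summary of the code above,
   not an additional transformation).  */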
2680 /* Conveniently construct a function call expression. FNDECL names the
2681 function to be called, N is the number of arguments, and the "..."
2682 parameters are the argument expressions. Unlike build_call_expr
2683 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2685 static tree
2686 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2688 va_list ap;
2689 tree fntype = TREE_TYPE (fndecl);
2690 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2692 va_start (ap, n);
2693 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2694 va_end (ap);
2695 SET_EXPR_LOCATION (fn, loc);
2696 return fn;
2699 /* Expand a call to one of the builtin rounding functions gcc defines
2700 as an extension (lfloor and lceil). As these are gcc extensions we
2701 do not need to worry about setting errno to EDOM.
2702 If expanding via optab fails, lower expression to (int)(floor(x)).
2703 EXP is the expression that is a call to the builtin function;
2704 if convenient, the result should be placed in TARGET. */
2706 static rtx
2707 expand_builtin_int_roundingfn (tree exp, rtx target)
2709 convert_optab builtin_optab;
2710 rtx op0, tmp;
2711 rtx_insn *insns;
2712 tree fndecl = get_callee_fndecl (exp);
2713 enum built_in_function fallback_fn;
2714 tree fallback_fndecl;
2715 enum machine_mode mode;
2716 tree arg;
2718 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2719 gcc_unreachable ();
2721 arg = CALL_EXPR_ARG (exp, 0);
2723 switch (DECL_FUNCTION_CODE (fndecl))
2725 CASE_FLT_FN (BUILT_IN_ICEIL):
2726 CASE_FLT_FN (BUILT_IN_LCEIL):
2727 CASE_FLT_FN (BUILT_IN_LLCEIL):
2728 builtin_optab = lceil_optab;
2729 fallback_fn = BUILT_IN_CEIL;
2730 break;
2732 CASE_FLT_FN (BUILT_IN_IFLOOR):
2733 CASE_FLT_FN (BUILT_IN_LFLOOR):
2734 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2735 builtin_optab = lfloor_optab;
2736 fallback_fn = BUILT_IN_FLOOR;
2737 break;
2739 default:
2740 gcc_unreachable ();
2743 /* Make a suitable register to place result in. */
2744 mode = TYPE_MODE (TREE_TYPE (exp));
2746 target = gen_reg_rtx (mode);
2748 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2749 need to expand the argument again. This way, we will not perform
2750 side-effects more than once. */
2751 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2753 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2755 start_sequence ();
2757 /* Compute into TARGET. */
2758 if (expand_sfix_optab (target, op0, builtin_optab))
2760 /* Output the entire sequence. */
2761 insns = get_insns ();
2762 end_sequence ();
2763 emit_insn (insns);
2764 return target;
2767 /* If we were unable to expand via the builtin, stop the sequence
2768 (without outputting the insns). */
2769 end_sequence ();
2771 /* Fall back to floating point rounding optab. */
2772 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2774 /* For non-C99 targets we may end up without a fallback fndecl here
2775 if the user called __builtin_lfloor directly. In this case emit
2776 a call to the floor/ceil variants nevertheless. This should give
2777 the best user experience on targets without full C99 support. */
2778 if (fallback_fndecl == NULL_TREE)
2780 tree fntype;
2781 const char *name = NULL;
2783 switch (DECL_FUNCTION_CODE (fndecl))
2785 case BUILT_IN_ICEIL:
2786 case BUILT_IN_LCEIL:
2787 case BUILT_IN_LLCEIL:
2788 name = "ceil";
2789 break;
2790 case BUILT_IN_ICEILF:
2791 case BUILT_IN_LCEILF:
2792 case BUILT_IN_LLCEILF:
2793 name = "ceilf";
2794 break;
2795 case BUILT_IN_ICEILL:
2796 case BUILT_IN_LCEILL:
2797 case BUILT_IN_LLCEILL:
2798 name = "ceill";
2799 break;
2800 case BUILT_IN_IFLOOR:
2801 case BUILT_IN_LFLOOR:
2802 case BUILT_IN_LLFLOOR:
2803 name = "floor";
2804 break;
2805 case BUILT_IN_IFLOORF:
2806 case BUILT_IN_LFLOORF:
2807 case BUILT_IN_LLFLOORF:
2808 name = "floorf";
2809 break;
2810 case BUILT_IN_IFLOORL:
2811 case BUILT_IN_LFLOORL:
2812 case BUILT_IN_LLFLOORL:
2813 name = "floorl";
2814 break;
2815 default:
2816 gcc_unreachable ();
2819 fntype = build_function_type_list (TREE_TYPE (arg),
2820 TREE_TYPE (arg), NULL_TREE);
2821 fallback_fndecl = build_fn_decl (name, fntype);
2824 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2826 tmp = expand_normal (exp);
2827 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2829 /* Truncate the result of the floating point optab to integer
2830 via expand_fix (). */
2831 target = gen_reg_rtx (mode);
2832 expand_fix (target, tmp, 0);
2834 return target;
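/* In effect (a sketch of the fallback path): when lceil_optab or
   lfloor_optab is unavailable, a call such as

       long l = __builtin_lfloor (x);

   is lowered to l = (long) floor (x), where the floor call itself may
   end up as a library call.  */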
2837 /* Expand a call to one of the builtin math functions doing integer
2838 conversion (lrint).
2839 Return 0 if a normal call should be emitted rather than expanding the
2840 function in-line. EXP is the expression that is a call to the builtin
2841 function; if convenient, the result should be placed in TARGET. */
2843 static rtx
2844 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2846 convert_optab builtin_optab;
2847 rtx op0;
2848 rtx_insn *insns;
2849 tree fndecl = get_callee_fndecl (exp);
2850 tree arg;
2851 enum machine_mode mode;
2852 enum built_in_function fallback_fn = BUILT_IN_NONE;
2854 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2855 gcc_unreachable ();
2857 arg = CALL_EXPR_ARG (exp, 0);
2859 switch (DECL_FUNCTION_CODE (fndecl))
2861 CASE_FLT_FN (BUILT_IN_IRINT):
2862 fallback_fn = BUILT_IN_LRINT;
2863 /* FALLTHRU */
2864 CASE_FLT_FN (BUILT_IN_LRINT):
2865 CASE_FLT_FN (BUILT_IN_LLRINT):
2866 builtin_optab = lrint_optab;
2867 break;
2869 CASE_FLT_FN (BUILT_IN_IROUND):
2870 fallback_fn = BUILT_IN_LROUND;
2871 /* FALLTHRU */
2872 CASE_FLT_FN (BUILT_IN_LROUND):
2873 CASE_FLT_FN (BUILT_IN_LLROUND):
2874 builtin_optab = lround_optab;
2875 break;
2877 default:
2878 gcc_unreachable ();
2881 /* There's no easy way to detect the case we need to set EDOM. */
2882 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2883 return NULL_RTX;
2885 /* Make a suitable register to place result in. */
2886 mode = TYPE_MODE (TREE_TYPE (exp));
2888 /* There's no easy way to detect the case we need to set EDOM. */
2889 if (!flag_errno_math)
2891 rtx result = gen_reg_rtx (mode);
2893 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2894 need to expand the argument again. This way, we will not perform
2895 side-effects more than once. */
2896 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2898 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2900 start_sequence ();
2902 if (expand_sfix_optab (result, op0, builtin_optab))
2904 /* Output the entire sequence. */
2905 insns = get_insns ();
2906 end_sequence ();
2907 emit_insn (insns);
2908 return result;
2911 /* If we were unable to expand via the builtin, stop the sequence
2912 (without outputting the insns) and call the library function
2913 with the stabilized argument list. */
2914 end_sequence ();
2917 if (fallback_fn != BUILT_IN_NONE)
2919 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2920 targets, (int) round (x) should never be transformed into
2921 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2922 a call to lround in the hope that the target provides at least some
2923 C99 functions. This should give the best user experience on
2924 targets without full C99 support. */
2925 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2926 fallback_fn, 0);
2928 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2929 fallback_fndecl, 1, arg);
2931 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2932 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2933 return convert_to_mode (mode, target, 0);
2936 return expand_call (exp, target, target == const0_rtx);
2939 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2940 a normal call should be emitted rather than expanding the function
2941 in-line. EXP is the expression that is a call to the builtin
2942 function; if convenient, the result should be placed in TARGET. */
2944 static rtx
2945 expand_builtin_powi (tree exp, rtx target)
2947 tree arg0, arg1;
2948 rtx op0, op1;
2949 enum machine_mode mode;
2950 enum machine_mode mode2;
2952 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2953 return NULL_RTX;
2955 arg0 = CALL_EXPR_ARG (exp, 0);
2956 arg1 = CALL_EXPR_ARG (exp, 1);
2957 mode = TYPE_MODE (TREE_TYPE (exp));
2959 /* Emit a libcall to libgcc. */
2961 /* Mode of the 2nd argument must match that of an int. */
2962 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2964 if (target == NULL_RTX)
2965 target = gen_reg_rtx (mode);
2967 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2968 if (GET_MODE (op0) != mode)
2969 op0 = convert_to_mode (mode, op0, 0);
2970 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2971 if (GET_MODE (op1) != mode2)
2972 op1 = convert_to_mode (mode2, op1, 0);
2974 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2975 target, LCT_CONST, mode, 2,
2976 op0, mode, op1, mode2);
2978 return target;
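/* __builtin_powi (x, n) raises x to the integer power n. This expander
   does not try an insn pattern at all; it always emits a libcall to
   libgcc, e.g. __powidf2 for double (assuming the usual libgcc
   naming).  */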
2981 /* Expand expression EXP which is a call to the strlen builtin. Return
2982 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2983 try to get the result in TARGET, if convenient. */
2985 static rtx
2986 expand_builtin_strlen (tree exp, rtx target,
2987 enum machine_mode target_mode)
2989 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2990 return NULL_RTX;
2991 else
2993 struct expand_operand ops[4];
2994 rtx pat;
2995 tree len;
2996 tree src = CALL_EXPR_ARG (exp, 0);
2997 rtx src_reg;
2998 rtx_insn *before_strlen;
2999 enum machine_mode insn_mode = target_mode;
3000 enum insn_code icode = CODE_FOR_nothing;
3001 unsigned int align;
3003 /* If the length can be computed at compile-time, return it. */
3004 len = c_strlen (src, 0);
3005 if (len)
3006 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3008 /* If the length can be computed at compile-time and is a constant
3009 integer, but there are side-effects in src, evaluate
3010 src for side-effects, then return len.
3011 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3012 can be optimized into: i++; x = 3; */
3013 len = c_strlen (src, 1);
3014 if (len && TREE_CODE (len) == INTEGER_CST)
3016 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3017 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3020 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3022 /* If SRC is not a pointer type, don't do this operation inline. */
3023 if (align == 0)
3024 return NULL_RTX;
3026 /* Bail out if we can't compute strlen in the right mode. */
3027 while (insn_mode != VOIDmode)
3029 icode = optab_handler (strlen_optab, insn_mode);
3030 if (icode != CODE_FOR_nothing)
3031 break;
3033 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3035 if (insn_mode == VOIDmode)
3036 return NULL_RTX;
3038 /* Make a place to hold the source address. We will not expand
3039 the actual source until we are sure that the expansion will
3040 not fail -- there are trees that cannot be expanded twice. */
3041 src_reg = gen_reg_rtx (Pmode);
3043 /* Mark the beginning of the strlen sequence so we can emit the
3044 source operand later. */
3045 before_strlen = get_last_insn ();
3047 create_output_operand (&ops[0], target, insn_mode);
3048 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3049 create_integer_operand (&ops[2], 0);
3050 create_integer_operand (&ops[3], align);
3051 if (!maybe_expand_insn (icode, 4, ops))
3052 return NULL_RTX;
3054 /* Now that we are assured of success, expand the source. */
3055 start_sequence ();
3056 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3057 if (pat != src_reg)
3059 #ifdef POINTERS_EXTEND_UNSIGNED
3060 if (GET_MODE (pat) != Pmode)
3061 pat = convert_to_mode (Pmode, pat,
3062 POINTERS_EXTEND_UNSIGNED);
3063 #endif
3064 emit_move_insn (src_reg, pat);
3066 pat = get_insns ();
3067 end_sequence ();
3069 if (before_strlen)
3070 emit_insn_after (pat, before_strlen);
3071 else
3072 emit_insn_before (pat, get_insns ());
3074 /* Return the value in the proper mode for this function. */
3075 if (GET_MODE (ops[0].value) == target_mode)
3076 target = ops[0].value;
3077 else if (target != 0)
3078 convert_move (target, ops[0].value, 0);
3079 else
3080 target = convert_to_mode (target_mode, ops[0].value, 0);
3082 return target;
3086 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3087 bytes from the constant string DATA + OFFSET and return them as a
3088 target constant. */
3090 static rtx
3091 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3092 enum machine_mode mode)
3094 const char *str = (const char *) data;
3096 gcc_assert (offset >= 0
3097 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3098 <= strlen (str) + 1));
3100 return c_readstr (str + offset, mode);
3103 /* LEN specifies the length of the block for the memcpy/memset operation.
3104 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3105 In some cases we can make a very likely guess at the maximum size,
3106 which we then store in PROBABLE_MAX_SIZE. */
3108 static void
3109 determine_block_size (tree len, rtx len_rtx,
3110 unsigned HOST_WIDE_INT *min_size,
3111 unsigned HOST_WIDE_INT *max_size,
3112 unsigned HOST_WIDE_INT *probable_max_size)
3114 if (CONST_INT_P (len_rtx))
3116 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3117 return;
3119 else
3121 wide_int min, max;
3122 enum value_range_type range_type = VR_UNDEFINED;
3124 /* Determine bounds from the type. */
3125 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3126 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3127 else
3128 *min_size = 0;
3129 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3130 *probable_max_size = *max_size
3131 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3132 else
3133 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3135 if (TREE_CODE (len) == SSA_NAME)
3136 range_type = get_range_info (len, &min, &max);
3137 if (range_type == VR_RANGE)
3139 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3140 *min_size = min.to_uhwi ();
3141 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3142 *probable_max_size = *max_size = max.to_uhwi ();
3144 else if (range_type == VR_ANTI_RANGE)
3146 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3147 if (min == 0)
3149 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3150 *min_size = max.to_uhwi () + 1;
3152 /* Code like
3154 int n;
3155 if (n < 100)
3156 memcpy (a, b, n)
3158 produces an anti-range allowing negative values of N. We can
3159 still use that information and guess that N is not negative. */
3161 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3162 *probable_max_size = min.to_uhwi () - 1;
3165 gcc_checking_assert (*max_size <=
3166 (unsigned HOST_WIDE_INT)
3167 GET_MODE_MASK (GET_MODE (len_rtx)));
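/* For instance (a sketch, assuming VRP has recorded a range for the
   SSA name LEN): if LEN is known to lie in [16, 64], the call sets
   *MIN_SIZE to 16 and both *MAX_SIZE and *PROBABLE_MAX_SIZE to 64,
   letting the block-move expander choose a strategy without a runtime
   size check.  */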
3170 /* Expand a call EXP to the memcpy builtin.
3171 Return NULL_RTX if we failed, the caller should emit a normal call,
3172 otherwise try to get the result in TARGET, if convenient (and in
3173 mode MODE if that's convenient). */
3175 static rtx
3176 expand_builtin_memcpy (tree exp, rtx target)
3178 if (!validate_arglist (exp,
3179 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3180 return NULL_RTX;
3181 else
3183 tree dest = CALL_EXPR_ARG (exp, 0);
3184 tree src = CALL_EXPR_ARG (exp, 1);
3185 tree len = CALL_EXPR_ARG (exp, 2);
3186 const char *src_str;
3187 unsigned int src_align = get_pointer_alignment (src);
3188 unsigned int dest_align = get_pointer_alignment (dest);
3189 rtx dest_mem, src_mem, dest_addr, len_rtx;
3190 HOST_WIDE_INT expected_size = -1;
3191 unsigned int expected_align = 0;
3192 unsigned HOST_WIDE_INT min_size;
3193 unsigned HOST_WIDE_INT max_size;
3194 unsigned HOST_WIDE_INT probable_max_size;
3196 /* If DEST is not a pointer type, call the normal function. */
3197 if (dest_align == 0)
3198 return NULL_RTX;
3200 /* Likewise, if SRC is not a pointer type, don't do this
3201 operation in-line. */
3202 if (src_align == 0)
3203 return NULL_RTX;
3205 if (currently_expanding_gimple_stmt)
3206 stringop_block_profile (currently_expanding_gimple_stmt,
3207 &expected_align, &expected_size);
3209 if (expected_align < dest_align)
3210 expected_align = dest_align;
3211 dest_mem = get_memory_rtx (dest, len);
3212 set_mem_align (dest_mem, dest_align);
3213 len_rtx = expand_normal (len);
3214 determine_block_size (len, len_rtx, &min_size, &max_size,
3215 &probable_max_size);
3216 src_str = c_getstr (src);
3218 /* If SRC is a string constant and the block move would be done
3219 by pieces, we can avoid loading the string from memory
3220 and instead store only the computed constants. */
3221 if (src_str
3222 && CONST_INT_P (len_rtx)
3223 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3224 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3225 CONST_CAST (char *, src_str),
3226 dest_align, false))
3228 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3229 builtin_memcpy_read_str,
3230 CONST_CAST (char *, src_str),
3231 dest_align, false, 0);
3232 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3233 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3234 return dest_mem;
3237 src_mem = get_memory_rtx (src, len);
3238 set_mem_align (src_mem, src_align);
3240 /* Copy word part most expediently. */
3241 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3242 CALL_EXPR_TAILCALL (exp)
3243 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3244 expected_align, expected_size,
3245 min_size, max_size, probable_max_size);
3247 if (dest_addr == 0)
3249 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3250 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3252 return dest_addr;
3256 /* Expand a call EXP to the mempcpy builtin.
3257 Return NULL_RTX if we failed; the caller should emit a normal call,
3258 otherwise try to get the result in TARGET, if convenient (and in
3259 mode MODE if that's convenient). If ENDP is 0 return the
3260 destination pointer, if ENDP is 1 return the end pointer ala
3261 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3262 stpcpy. */
3264 static rtx
3265 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3267 if (!validate_arglist (exp,
3268 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3269 return NULL_RTX;
3270 else
3272 tree dest = CALL_EXPR_ARG (exp, 0);
3273 tree src = CALL_EXPR_ARG (exp, 1);
3274 tree len = CALL_EXPR_ARG (exp, 2);
3275 return expand_builtin_mempcpy_args (dest, src, len,
3276 target, mode, /*endp=*/ 1);
3280 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3281 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3282 so that this can also be called without constructing an actual CALL_EXPR.
3283 The other arguments and return value are the same as for
3284 expand_builtin_mempcpy. */
3286 static rtx
3287 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3288 rtx target, enum machine_mode mode, int endp)
3290 /* If return value is ignored, transform mempcpy into memcpy. */
3291 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3293 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3294 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3295 dest, src, len);
3296 return expand_expr (result, target, mode, EXPAND_NORMAL);
3298 else
3300 const char *src_str;
3301 unsigned int src_align = get_pointer_alignment (src);
3302 unsigned int dest_align = get_pointer_alignment (dest);
3303 rtx dest_mem, src_mem, len_rtx;
3305 /* If either SRC or DEST is not a pointer type, don't do this
3306 operation in-line. */
3307 if (dest_align == 0 || src_align == 0)
3308 return NULL_RTX;
3310 /* If LEN is not constant, call the normal function. */
3311 if (! tree_fits_uhwi_p (len))
3312 return NULL_RTX;
3314 len_rtx = expand_normal (len);
3315 src_str = c_getstr (src);
3317 /* If SRC is a string constant and the block move would be done
3318 by pieces, we can avoid loading the string from memory
3319 and instead store only the computed constants. */
3320 if (src_str
3321 && CONST_INT_P (len_rtx)
3322 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3323 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3324 CONST_CAST (char *, src_str),
3325 dest_align, false))
3327 dest_mem = get_memory_rtx (dest, len);
3328 set_mem_align (dest_mem, dest_align);
3329 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3330 builtin_memcpy_read_str,
3331 CONST_CAST (char *, src_str),
3332 dest_align, false, endp);
3333 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3334 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3335 return dest_mem;
3338 if (CONST_INT_P (len_rtx)
3339 && can_move_by_pieces (INTVAL (len_rtx),
3340 MIN (dest_align, src_align)))
3342 dest_mem = get_memory_rtx (dest, len);
3343 set_mem_align (dest_mem, dest_align);
3344 src_mem = get_memory_rtx (src, len);
3345 set_mem_align (src_mem, src_align);
3346 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3347 MIN (dest_align, src_align), endp);
3348 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3349 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3350 return dest_mem;
3353 return NULL_RTX;
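/* An example of the first transformation above (a sketch): when the
   result is unused,

       mempcpy (d, s, n);

   is rewritten as memcpy (d, s, n), which is cheaper because the end
   pointer d + n never has to be produced.  */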
3357 #ifndef HAVE_movstr
3358 # define HAVE_movstr 0
3359 # define CODE_FOR_movstr CODE_FOR_nothing
3360 #endif
3362 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3363 we failed; the caller should emit a normal call. Otherwise try to
3364 get the result in TARGET, if convenient. If ENDP is 0 return the
3365 destination pointer, if ENDP is 1 return the end pointer ala
3366 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3367 stpcpy. */
3369 static rtx
3370 expand_movstr (tree dest, tree src, rtx target, int endp)
3372 struct expand_operand ops[3];
3373 rtx dest_mem;
3374 rtx src_mem;
3376 if (!HAVE_movstr)
3377 return NULL_RTX;
3379 dest_mem = get_memory_rtx (dest, NULL);
3380 src_mem = get_memory_rtx (src, NULL);
3381 if (!endp)
3383 target = force_reg (Pmode, XEXP (dest_mem, 0));
3384 dest_mem = replace_equiv_address (dest_mem, target);
3387 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3388 create_fixed_operand (&ops[1], dest_mem);
3389 create_fixed_operand (&ops[2], src_mem);
3390 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3391 return NULL_RTX;
3393 if (endp && target != const0_rtx)
3395 target = ops[0].value;
3396 /* movstr is supposed to set end to the address of the NUL
3397 terminator. If the caller requested a mempcpy-like return value,
3398 adjust it. */
3399 if (endp == 1)
3401 rtx tem = plus_constant (GET_MODE (target),
3402 gen_lowpart (GET_MODE (target), target), 1);
3403 emit_move_insn (target, force_operand (tem, NULL_RTX));
3406 return target;
3409 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3410 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3411 try to get the result in TARGET, if convenient (and in mode MODE if that's
3412 convenient). */
3414 static rtx
3415 expand_builtin_strcpy (tree exp, rtx target)
3417 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3419 tree dest = CALL_EXPR_ARG (exp, 0);
3420 tree src = CALL_EXPR_ARG (exp, 1);
3421 return expand_builtin_strcpy_args (dest, src, target);
3423 return NULL_RTX;
3426 /* Helper function to do the actual work for expand_builtin_strcpy. The
3427 arguments to the builtin_strcpy call DEST and SRC are broken out
3428 so that this can also be called without constructing an actual CALL_EXPR.
3429 The other arguments and return value are the same as for
3430 expand_builtin_strcpy. */
3432 static rtx
3433 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3435 return expand_movstr (dest, src, target, /*endp=*/0);
3438 /* Expand a call EXP to the stpcpy builtin.
3439 Return NULL_RTX if we failed; the caller should emit a normal call;
3440 otherwise try to get the result in TARGET, if convenient (and in
3441 mode MODE if that's convenient). */
3443 static rtx
3444 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3446 tree dst, src;
3447 location_t loc = EXPR_LOCATION (exp);
3449 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3450 return NULL_RTX;
3452 dst = CALL_EXPR_ARG (exp, 0);
3453 src = CALL_EXPR_ARG (exp, 1);
3455 /* If return value is ignored, transform stpcpy into strcpy. */
3456 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3458 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3459 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3460 return expand_expr (result, target, mode, EXPAND_NORMAL);
3462 else
3464 tree len, lenp1;
3465 rtx ret;
3467 /* Ensure we get an actual string whose length can be evaluated at
3468 compile-time, not an expression containing a string. This is
3469 because the latter will potentially produce pessimized code
3470 when used to produce the return value. */
3471 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3472 return expand_movstr (dst, src, target, /*endp=*/2);
3474 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3475 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3476 target, mode, /*endp=*/2);
3478 if (ret)
3479 return ret;
3481 if (TREE_CODE (len) == INTEGER_CST)
3483 rtx len_rtx = expand_normal (len);
3485 if (CONST_INT_P (len_rtx))
3487 ret = expand_builtin_strcpy_args (dst, src, target);
3489 if (ret)
3491 if (! target)
3493 if (mode != VOIDmode)
3494 target = gen_reg_rtx (mode);
3495 else
3496 target = gen_reg_rtx (GET_MODE (ret));
3498 if (GET_MODE (target) != GET_MODE (ret))
3499 ret = gen_lowpart (GET_MODE (target), ret);
3501 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3502 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3503 gcc_assert (ret);
3505 return target;
3510 return expand_movstr (dst, src, target, /*endp=*/2);
3514 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3515 bytes from constant string DATA + OFFSET and return it as target
3516 constant. */
3519 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3520 enum machine_mode mode)
3522 const char *str = (const char *) data;
3524 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3525 return const0_rtx;
3527 return c_readstr (str + offset, mode);
3530 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3531 NULL_RTX if we failed the caller should emit a normal call. */
3533 static rtx
3534 expand_builtin_strncpy (tree exp, rtx target)
3536 location_t loc = EXPR_LOCATION (exp);
3538 if (validate_arglist (exp,
3539 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3541 tree dest = CALL_EXPR_ARG (exp, 0);
3542 tree src = CALL_EXPR_ARG (exp, 1);
3543 tree len = CALL_EXPR_ARG (exp, 2);
3544 tree slen = c_strlen (src, 1);
3546 /* We must be passed a constant len and src parameter. */
3547 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3548 return NULL_RTX;
3550 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3552 /* We're required to pad with trailing zeros if the requested
3553 len is greater than strlen(s2)+1. In that case try to
3554 use store_by_pieces, if it fails, punt. */
3555 if (tree_int_cst_lt (slen, len))
3557 unsigned int dest_align = get_pointer_alignment (dest);
3558 const char *p = c_getstr (src);
3559 rtx dest_mem;
3561 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3562 || !can_store_by_pieces (tree_to_uhwi (len),
3563 builtin_strncpy_read_str,
3564 CONST_CAST (char *, p),
3565 dest_align, false))
3566 return NULL_RTX;
3568 dest_mem = get_memory_rtx (dest, len);
3569 store_by_pieces (dest_mem, tree_to_uhwi (len),
3570 builtin_strncpy_read_str,
3571 CONST_CAST (char *, p), dest_align, false, 0);
3572 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3573 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3574 return dest_mem;
3577 return NULL_RTX;
3580 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3581 bytes from constant string DATA + OFFSET and return it as target
3582 constant. */
3585 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3586 enum machine_mode mode)
3588 const char *c = (const char *) data;
3589 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3591 memset (p, *c, GET_MODE_SIZE (mode));
3593 return c_readstr (p, mode);
3596 /* Callback routine for store_by_pieces. Return the RTL of a register
3597 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3598 char value given in the RTL register data. For example, if mode is
3599 4 bytes wide, return the RTL for 0x01010101*data. */
3601 static rtx
3602 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3603 enum machine_mode mode)
3605 rtx target, coeff;
3606 size_t size;
3607 char *p;
3609 size = GET_MODE_SIZE (mode);
3610 if (size == 1)
3611 return (rtx) data;
3613 p = XALLOCAVEC (char, size);
3614 memset (p, 1, size);
3615 coeff = c_readstr (p, mode);
3617 target = convert_to_mode (mode, (rtx) data, 1);
3618 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3619 return force_reg (mode, target);
3622 /* Expand expression EXP, which is a call to the memset builtin. Return
3623 NULL_RTX if we failed the caller should emit a normal call, otherwise
3624 try to get the result in TARGET, if convenient (and in mode MODE if that's
3625 convenient). */
3627 static rtx
3628 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3630 if (!validate_arglist (exp,
3631 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3632 return NULL_RTX;
3633 else
3635 tree dest = CALL_EXPR_ARG (exp, 0);
3636 tree val = CALL_EXPR_ARG (exp, 1);
3637 tree len = CALL_EXPR_ARG (exp, 2);
3638 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3642 /* Helper function to do the actual work for expand_builtin_memset. The
3643 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3644 so that this can also be called without constructing an actual CALL_EXPR.
3645 The other arguments and return value are the same as for
3646 expand_builtin_memset. */
3648 static rtx
3649 expand_builtin_memset_args (tree dest, tree val, tree len,
3650 rtx target, enum machine_mode mode, tree orig_exp)
3652 tree fndecl, fn;
3653 enum built_in_function fcode;
3654 enum machine_mode val_mode;
3655 char c;
3656 unsigned int dest_align;
3657 rtx dest_mem, dest_addr, len_rtx;
3658 HOST_WIDE_INT expected_size = -1;
3659 unsigned int expected_align = 0;
3660 unsigned HOST_WIDE_INT min_size;
3661 unsigned HOST_WIDE_INT max_size;
3662 unsigned HOST_WIDE_INT probable_max_size;
3664 dest_align = get_pointer_alignment (dest);
3666 /* If DEST is not a pointer type, don't do this operation in-line. */
3667 if (dest_align == 0)
3668 return NULL_RTX;
3670 if (currently_expanding_gimple_stmt)
3671 stringop_block_profile (currently_expanding_gimple_stmt,
3672 &expected_align, &expected_size);
3674 if (expected_align < dest_align)
3675 expected_align = dest_align;
3677 /* If the LEN parameter is zero, return DEST. */
3678 if (integer_zerop (len))
3680 /* Evaluate and ignore VAL in case it has side-effects. */
3681 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3682 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3685 /* Stabilize the arguments in case we fail. */
3686 dest = builtin_save_expr (dest);
3687 val = builtin_save_expr (val);
3688 len = builtin_save_expr (len);
3690 len_rtx = expand_normal (len);
3691 determine_block_size (len, len_rtx, &min_size, &max_size,
3692 &probable_max_size);
3693 dest_mem = get_memory_rtx (dest, len);
3694 val_mode = TYPE_MODE (unsigned_char_type_node);
3696 if (TREE_CODE (val) != INTEGER_CST)
3698 rtx val_rtx;
3700 val_rtx = expand_normal (val);
3701 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3703 /* Assume that we can memset by pieces if we can store
3704 * the coefficients by pieces (in the required modes).
3705 * We can't pass builtin_memset_gen_str as that emits RTL. */
3706 c = 1;
3707 if (tree_fits_uhwi_p (len)
3708 && can_store_by_pieces (tree_to_uhwi (len),
3709 builtin_memset_read_str, &c, dest_align,
3710 true))
3712 val_rtx = force_reg (val_mode, val_rtx);
3713 store_by_pieces (dest_mem, tree_to_uhwi (len),
3714 builtin_memset_gen_str, val_rtx, dest_align,
3715 true, 0);
3717 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3718 dest_align, expected_align,
3719 expected_size, min_size, max_size,
3720 probable_max_size))
3721 goto do_libcall;
3723 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3724 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3725 return dest_mem;
3728 if (target_char_cast (val, &c))
3729 goto do_libcall;
3731 if (c)
3733 if (tree_fits_uhwi_p (len)
3734 && can_store_by_pieces (tree_to_uhwi (len),
3735 builtin_memset_read_str, &c, dest_align,
3736 true))
3737 store_by_pieces (dest_mem, tree_to_uhwi (len),
3738 builtin_memset_read_str, &c, dest_align, true, 0);
3739 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3740 gen_int_mode (c, val_mode),
3741 dest_align, expected_align,
3742 expected_size, min_size, max_size,
3743 probable_max_size))
3744 goto do_libcall;
3746 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3747 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3748 return dest_mem;
3751 set_mem_align (dest_mem, dest_align);
3752 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3753 CALL_EXPR_TAILCALL (orig_exp)
3754 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3755 expected_align, expected_size,
3756 min_size, max_size,
3757 probable_max_size);
3759 if (dest_addr == 0)
3761 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3762 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3765 return dest_addr;
3767 do_libcall:
3768 fndecl = get_callee_fndecl (orig_exp);
3769 fcode = DECL_FUNCTION_CODE (fndecl);
3770 if (fcode == BUILT_IN_MEMSET)
3771 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3772 dest, val, len);
3773 else if (fcode == BUILT_IN_BZERO)
3774 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3775 dest, len);
3776 else
3777 gcc_unreachable ();
3778 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3779 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3780 return expand_call (fn, target, target == const0_rtx);
3783 /* Expand expression EXP, which is a call to the bzero builtin. Return
3784 NULL_RTX if we failed the caller should emit a normal call. */
3786 static rtx
3787 expand_builtin_bzero (tree exp)
3789 tree dest, size;
3790 location_t loc = EXPR_LOCATION (exp);
3792 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3793 return NULL_RTX;
3795 dest = CALL_EXPR_ARG (exp, 0);
3796 size = CALL_EXPR_ARG (exp, 1);
3798 /* New argument list transforming bzero(ptr x, int y) to
3799 memset(ptr x, int 0, size_t y). This is done this way
3800 so that if it isn't expanded inline, we fallback to
3801 calling bzero instead of memset. */
3803 return expand_builtin_memset_args (dest, integer_zero_node,
3804 fold_convert_loc (loc,
3805 size_type_node, size),
3806 const0_rtx, VOIDmode, exp);
3809 /* Expand expression EXP, which is a call to the memcmp built-in function.
3810 Return NULL_RTX if we failed and the caller should emit a normal call,
3811 otherwise try to get the result in TARGET, if convenient (and in mode
3812 MODE, if that's convenient). */
3814 static rtx
3815 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3816 ATTRIBUTE_UNUSED enum machine_mode mode)
3818 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3820 if (!validate_arglist (exp,
3821 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3822 return NULL_RTX;
3824 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3825 implementing memcmp because it will stop if it encounters two
3826 zero bytes. */
3827 #if defined HAVE_cmpmemsi
3829 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3830 rtx result;
3831 rtx insn;
3832 tree arg1 = CALL_EXPR_ARG (exp, 0);
3833 tree arg2 = CALL_EXPR_ARG (exp, 1);
3834 tree len = CALL_EXPR_ARG (exp, 2);
3836 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3837 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3838 enum machine_mode insn_mode;
3840 if (HAVE_cmpmemsi)
3841 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3842 else
3843 return NULL_RTX;
3845 /* If we don't have POINTER_TYPE, call the function. */
3846 if (arg1_align == 0 || arg2_align == 0)
3847 return NULL_RTX;
3849 /* Make a place to write the result of the instruction. */
3850 result = target;
3851 if (! (result != 0
3852 && REG_P (result) && GET_MODE (result) == insn_mode
3853 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3854 result = gen_reg_rtx (insn_mode);
3856 arg1_rtx = get_memory_rtx (arg1, len);
3857 arg2_rtx = get_memory_rtx (arg2, len);
3858 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3860 /* Set MEM_SIZE as appropriate. */
3861 if (CONST_INT_P (arg3_rtx))
3863 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3864 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3867 if (HAVE_cmpmemsi)
3868 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3869 GEN_INT (MIN (arg1_align, arg2_align)));
3870 else
3871 gcc_unreachable ();
3873 if (insn)
3874 emit_insn (insn);
3875 else
3876 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3877 TYPE_MODE (integer_type_node), 3,
3878 XEXP (arg1_rtx, 0), Pmode,
3879 XEXP (arg2_rtx, 0), Pmode,
3880 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3881 TYPE_UNSIGNED (sizetype)),
3882 TYPE_MODE (sizetype));
3884 /* Return the value in the proper mode for this function. */
3885 mode = TYPE_MODE (TREE_TYPE (exp));
3886 if (GET_MODE (result) == mode)
3887 return result;
3888 else if (target != 0)
3890 convert_move (target, result, 0);
3891 return target;
3893 else
3894 return convert_to_mode (mode, result, 0);
3896 #endif /* HAVE_cmpmemsi. */
3898 return NULL_RTX;
3901 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3902 if we failed the caller should emit a normal call, otherwise try to get
3903 the result in TARGET, if convenient. */
3905 static rtx
3906 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3908 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3909 return NULL_RTX;
3911 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3912 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3913 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3915 rtx arg1_rtx, arg2_rtx;
3916 rtx result, insn = NULL_RTX;
3917 tree fndecl, fn;
3918 tree arg1 = CALL_EXPR_ARG (exp, 0);
3919 tree arg2 = CALL_EXPR_ARG (exp, 1);
3921 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3922 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3924 /* If we don't have POINTER_TYPE, call the function. */
3925 if (arg1_align == 0 || arg2_align == 0)
3926 return NULL_RTX;
3928 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3929 arg1 = builtin_save_expr (arg1);
3930 arg2 = builtin_save_expr (arg2);
3932 arg1_rtx = get_memory_rtx (arg1, NULL);
3933 arg2_rtx = get_memory_rtx (arg2, NULL);
3935 #ifdef HAVE_cmpstrsi
3936 /* Try to call cmpstrsi. */
3937 if (HAVE_cmpstrsi)
3939 enum machine_mode insn_mode
3940 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3942 /* Make a place to write the result of the instruction. */
3943 result = target;
3944 if (! (result != 0
3945 && REG_P (result) && GET_MODE (result) == insn_mode
3946 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3947 result = gen_reg_rtx (insn_mode);
3949 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3950 GEN_INT (MIN (arg1_align, arg2_align)));
3952 #endif
3953 #ifdef HAVE_cmpstrnsi
3954 /* Try to determine at least one length and call cmpstrnsi. */
3955 if (!insn && HAVE_cmpstrnsi)
3957 tree len;
3958 rtx arg3_rtx;
3960 enum machine_mode insn_mode
3961 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3962 tree len1 = c_strlen (arg1, 1);
3963 tree len2 = c_strlen (arg2, 1);
3965 if (len1)
3966 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3967 if (len2)
3968 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3970 /* If we don't have a constant length for the first, use the length
3971 of the second, if we know it. We don't require a constant for
3972 this case; some cost analysis could be done if both are available
3973 but neither is constant. For now, assume they're equally cheap,
3974 unless one has side effects. If both strings have constant lengths,
3975 use the smaller. */
3977 if (!len1)
3978 len = len2;
3979 else if (!len2)
3980 len = len1;
3981 else if (TREE_SIDE_EFFECTS (len1))
3982 len = len2;
3983 else if (TREE_SIDE_EFFECTS (len2))
3984 len = len1;
3985 else if (TREE_CODE (len1) != INTEGER_CST)
3986 len = len2;
3987 else if (TREE_CODE (len2) != INTEGER_CST)
3988 len = len1;
3989 else if (tree_int_cst_lt (len1, len2))
3990 len = len1;
3991 else
3992 len = len2;
3994 /* If both arguments have side effects, we cannot optimize. */
3995 if (!len || TREE_SIDE_EFFECTS (len))
3996 goto do_libcall;
3998 arg3_rtx = expand_normal (len);
4000 /* Make a place to write the result of the instruction. */
4001 result = target;
4002 if (! (result != 0
4003 && REG_P (result) && GET_MODE (result) == insn_mode
4004 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4005 result = gen_reg_rtx (insn_mode);
4007 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4008 GEN_INT (MIN (arg1_align, arg2_align)));
4010 #endif
4012 if (insn)
4014 enum machine_mode mode;
4015 emit_insn (insn);
4017 /* Return the value in the proper mode for this function. */
4018 mode = TYPE_MODE (TREE_TYPE (exp));
4019 if (GET_MODE (result) == mode)
4020 return result;
4021 if (target == 0)
4022 return convert_to_mode (mode, result, 0);
4023 convert_move (target, result, 0);
4024 return target;
4027 /* Expand the library call ourselves using a stabilized argument
4028 list to avoid re-evaluating the function's arguments twice. */
4029 #ifdef HAVE_cmpstrnsi
4030 do_libcall:
4031 #endif
4032 fndecl = get_callee_fndecl (exp);
4033 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4034 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4035 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4036 return expand_call (fn, target, target == const0_rtx);
4038 #endif
4039 return NULL_RTX;
4042 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4043 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4044 the result in TARGET, if convenient. */
4046 static rtx
4047 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4048 ATTRIBUTE_UNUSED enum machine_mode mode)
4050 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4052 if (!validate_arglist (exp,
4053 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4054 return NULL_RTX;
4056 /* If c_strlen can determine an expression for one of the string
4057 lengths, and it doesn't have side effects, then emit cmpstrnsi
4058 using length MIN(strlen(string)+1, arg3). */
4059 #ifdef HAVE_cmpstrnsi
4060 if (HAVE_cmpstrnsi)
4062 tree len, len1, len2;
4063 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4064 rtx result, insn;
4065 tree fndecl, fn;
4066 tree arg1 = CALL_EXPR_ARG (exp, 0);
4067 tree arg2 = CALL_EXPR_ARG (exp, 1);
4068 tree arg3 = CALL_EXPR_ARG (exp, 2);
4070 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4071 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4072 enum machine_mode insn_mode
4073 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4075 len1 = c_strlen (arg1, 1);
4076 len2 = c_strlen (arg2, 1);
4078 if (len1)
4079 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4080 if (len2)
4081 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4083 /* If we don't have a constant length for the first, use the length
4084 of the second, if we know it. We don't require a constant for
4085 this case; some cost analysis could be done if both are available
4086 but neither is constant. For now, assume they're equally cheap,
4087 unless one has side effects. If both strings have constant lengths,
4088 use the smaller. */
4090 if (!len1)
4091 len = len2;
4092 else if (!len2)
4093 len = len1;
4094 else if (TREE_SIDE_EFFECTS (len1))
4095 len = len2;
4096 else if (TREE_SIDE_EFFECTS (len2))
4097 len = len1;
4098 else if (TREE_CODE (len1) != INTEGER_CST)
4099 len = len2;
4100 else if (TREE_CODE (len2) != INTEGER_CST)
4101 len = len1;
4102 else if (tree_int_cst_lt (len1, len2))
4103 len = len1;
4104 else
4105 len = len2;
4107 /* If both arguments have side effects, we cannot optimize. */
4108 if (!len || TREE_SIDE_EFFECTS (len))
4109 return NULL_RTX;
4111 /* The actual new length parameter is MIN(len,arg3). */
4112 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4113 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4115 /* If we don't have POINTER_TYPE, call the function. */
4116 if (arg1_align == 0 || arg2_align == 0)
4117 return NULL_RTX;
4119 /* Make a place to write the result of the instruction. */
4120 result = target;
4121 if (! (result != 0
4122 && REG_P (result) && GET_MODE (result) == insn_mode
4123 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4124 result = gen_reg_rtx (insn_mode);
4126 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4127 arg1 = builtin_save_expr (arg1);
4128 arg2 = builtin_save_expr (arg2);
4129 len = builtin_save_expr (len);
4131 arg1_rtx = get_memory_rtx (arg1, len);
4132 arg2_rtx = get_memory_rtx (arg2, len);
4133 arg3_rtx = expand_normal (len);
4134 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4135 GEN_INT (MIN (arg1_align, arg2_align)));
4136 if (insn)
4138 emit_insn (insn);
4140 /* Return the value in the proper mode for this function. */
4141 mode = TYPE_MODE (TREE_TYPE (exp));
4142 if (GET_MODE (result) == mode)
4143 return result;
4144 if (target == 0)
4145 return convert_to_mode (mode, result, 0);
4146 convert_move (target, result, 0);
4147 return target;
4150 /* Expand the library call ourselves using a stabilized argument
4151 list to avoid re-evaluating the function's arguments twice. */
4152 fndecl = get_callee_fndecl (exp);
4153 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4154 arg1, arg2, len);
4155 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4156 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4157 return expand_call (fn, target, target == const0_rtx);
4159 #endif
4160 return NULL_RTX;
4163 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4164 if that's convenient. */
4167 expand_builtin_saveregs (void)
4169 rtx val;
4170 rtx_insn *seq;
4172 /* Don't do __builtin_saveregs more than once in a function.
4173 Save the result of the first call and reuse it. */
4174 if (saveregs_value != 0)
4175 return saveregs_value;
4177 /* When this function is called, it means that registers must be
4178 saved on entry to this function. So we migrate the call to the
4179 first insn of this function. */
4181 start_sequence ();
4183 /* Do whatever the machine needs done in this case. */
4184 val = targetm.calls.expand_builtin_saveregs ();
4186 seq = get_insns ();
4187 end_sequence ();
4189 saveregs_value = val;
4191 /* Put the insns after the NOTE that starts the function. If this
4192 is inside a start_sequence, make the outer-level insn chain current, so
4193 the code is placed at the start of the function. */
4194 push_topmost_sequence ();
4195 emit_insn_after (seq, entry_of_function ());
4196 pop_topmost_sequence ();
4198 return val;
4201 /* Expand a call to __builtin_next_arg. */
4203 static rtx
4204 expand_builtin_next_arg (void)
4206 /* Checking arguments is already done in fold_builtin_next_arg
4207 that must be called before this function. */
4208 return expand_binop (ptr_mode, add_optab,
4209 crtl->args.internal_arg_pointer,
4210 crtl->args.arg_offset_rtx,
4211 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4214 /* Make it easier for the backends by protecting the valist argument
4215 from multiple evaluations. */
4217 static tree
4218 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4220 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4222 /* The current way of determining the type of valist is completely
4223 bogus. We should have the information on the va builtin instead. */
4224 if (!vatype)
4225 vatype = targetm.fn_abi_va_list (cfun->decl);
4227 if (TREE_CODE (vatype) == ARRAY_TYPE)
4229 if (TREE_SIDE_EFFECTS (valist))
4230 valist = save_expr (valist);
4232 /* For this case, the backends will be expecting a pointer to
4233 vatype, but it's possible we've actually been given an array
4234 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4235 So fix it. */
4236 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4238 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4239 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4242 else
4244 tree pt = build_pointer_type (vatype);
4246 if (! needs_lvalue)
4248 if (! TREE_SIDE_EFFECTS (valist))
4249 return valist;
4251 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4252 TREE_SIDE_EFFECTS (valist) = 1;
4255 if (TREE_SIDE_EFFECTS (valist))
4256 valist = save_expr (valist);
4257 valist = fold_build2_loc (loc, MEM_REF,
4258 vatype, valist, build_int_cst (pt, 0));
4261 return valist;
4264 /* The "standard" definition of va_list is void*. */
4266 tree
4267 std_build_builtin_va_list (void)
4269 return ptr_type_node;
4272 /* The "standard" abi va_list is va_list_type_node. */
4274 tree
4275 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4277 return va_list_type_node;
4280 /* The "standard" type of va_list is va_list_type_node. */
4282 tree
4283 std_canonical_va_list_type (tree type)
4285 tree wtype, htype;
4287 if (INDIRECT_REF_P (type))
4288 type = TREE_TYPE (type);
4289 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4290 type = TREE_TYPE (type);
4291 wtype = va_list_type_node;
4292 htype = type;
4293 /* Treat structure va_list types. */
4294 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4295 htype = TREE_TYPE (htype);
4296 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4298 /* If va_list is an array type, the argument may have decayed
4299 to a pointer type, e.g. by being passed to another function.
4300 In that case, unwrap both types so that we can compare the
4301 underlying records. */
4302 if (TREE_CODE (htype) == ARRAY_TYPE
4303 || POINTER_TYPE_P (htype))
4305 wtype = TREE_TYPE (wtype);
4306 htype = TREE_TYPE (htype);
4309 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4310 return va_list_type_node;
4312 return NULL_TREE;
4315 /* The "standard" implementation of va_start: just assign `nextarg' to
4316 the variable. */
4318 void
4319 std_expand_builtin_va_start (tree valist, rtx nextarg)
4321 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4322 convert_move (va_r, nextarg, 0);
4325 /* Expand EXP, a call to __builtin_va_start. */
4327 static rtx
4328 expand_builtin_va_start (tree exp)
4330 rtx nextarg;
4331 tree valist;
4332 location_t loc = EXPR_LOCATION (exp);
4334 if (call_expr_nargs (exp) < 2)
4336 error_at (loc, "too few arguments to function %<va_start%>");
4337 return const0_rtx;
4340 if (fold_builtin_next_arg (exp, true))
4341 return const0_rtx;
4343 nextarg = expand_builtin_next_arg ();
4344 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4346 if (targetm.expand_builtin_va_start)
4347 targetm.expand_builtin_va_start (valist, nextarg);
4348 else
4349 std_expand_builtin_va_start (valist, nextarg);
4351 return const0_rtx;
4354 /* Expand EXP, a call to __builtin_va_end. */
4356 static rtx
4357 expand_builtin_va_end (tree exp)
4359 tree valist = CALL_EXPR_ARG (exp, 0);
4361 /* Evaluate for side effects, if needed. I hate macros that don't
4362 do that. */
4363 if (TREE_SIDE_EFFECTS (valist))
4364 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4366 return const0_rtx;
4369 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4370 builtin rather than just as an assignment in stdarg.h because of the
4371 nastiness of array-type va_list types. */
4373 static rtx
4374 expand_builtin_va_copy (tree exp)
4376 tree dst, src, t;
4377 location_t loc = EXPR_LOCATION (exp);
4379 dst = CALL_EXPR_ARG (exp, 0);
4380 src = CALL_EXPR_ARG (exp, 1);
4382 dst = stabilize_va_list_loc (loc, dst, 1);
4383 src = stabilize_va_list_loc (loc, src, 0);
4385 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4387 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4389 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4390 TREE_SIDE_EFFECTS (t) = 1;
4391 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4393 else
4395 rtx dstb, srcb, size;
4397 /* Evaluate to pointers. */
4398 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4399 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4400 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4401 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4403 dstb = convert_memory_address (Pmode, dstb);
4404 srcb = convert_memory_address (Pmode, srcb);
4406 /* "Dereference" to BLKmode memories. */
4407 dstb = gen_rtx_MEM (BLKmode, dstb);
4408 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4409 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4410 srcb = gen_rtx_MEM (BLKmode, srcb);
4411 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4412 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4414 /* Copy. */
4415 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4418 return const0_rtx;
4421 /* Expand a call to one of the builtin functions __builtin_frame_address or
4422 __builtin_return_address. */
4424 static rtx
4425 expand_builtin_frame_address (tree fndecl, tree exp)
4427 /* The argument must be a nonnegative integer constant.
4428 It counts the number of frames to scan up the stack.
4429 The value is the return address saved in that frame. */
4430 if (call_expr_nargs (exp) == 0)
4431 /* Warning about missing arg was already issued. */
4432 return const0_rtx;
4433 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4435 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4436 error ("invalid argument to %<__builtin_frame_address%>");
4437 else
4438 error ("invalid argument to %<__builtin_return_address%>");
4439 return const0_rtx;
4441 else
4443 rtx tem
4444 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4445 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4447 /* Some ports cannot access arbitrary stack frames. */
4448 if (tem == NULL)
4450 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4451 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4452 else
4453 warning (0, "unsupported argument to %<__builtin_return_address%>");
4454 return const0_rtx;
4457 /* For __builtin_frame_address, return what we've got. */
4458 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4459 return tem;
4461 if (!REG_P (tem)
4462 && ! CONSTANT_P (tem))
4463 tem = copy_addr_to_reg (tem);
4464 return tem;
4468 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4469 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4470 is the same as for allocate_dynamic_stack_space. */
4472 static rtx
4473 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4475 rtx op0;
4476 rtx result;
4477 bool valid_arglist;
4478 unsigned int align;
4479 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4480 == BUILT_IN_ALLOCA_WITH_ALIGN);
4482 valid_arglist
4483 = (alloca_with_align
4484 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4485 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4487 if (!valid_arglist)
4488 return NULL_RTX;
4490 /* Compute the argument. */
4491 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4493 /* Compute the alignment. */
4494 align = (alloca_with_align
4495 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4496 : BIGGEST_ALIGNMENT);
4498 /* Allocate the desired space. */
4499 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4500 result = convert_memory_address (ptr_mode, result);
4502 return result;
4505 /* Expand a call to bswap builtin in EXP.
4506 Return NULL_RTX if a normal call should be emitted rather than expanding the
4507 function in-line. If convenient, the result should be placed in TARGET.
4508 SUBTARGET may be used as the target for computing one of EXP's operands. */
4510 static rtx
4511 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4512 rtx subtarget)
4514 tree arg;
4515 rtx op0;
4517 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4518 return NULL_RTX;
4520 arg = CALL_EXPR_ARG (exp, 0);
4521 op0 = expand_expr (arg,
4522 subtarget && GET_MODE (subtarget) == target_mode
4523 ? subtarget : NULL_RTX,
4524 target_mode, EXPAND_NORMAL);
4525 if (GET_MODE (op0) != target_mode)
4526 op0 = convert_to_mode (target_mode, op0, 1);
4528 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4530 gcc_assert (target);
4532 return convert_to_mode (target_mode, target, 1);
4535 /* Expand a call to a unary builtin in EXP.
4536 Return NULL_RTX if a normal call should be emitted rather than expanding the
4537 function in-line. If convenient, the result should be placed in TARGET.
4538 SUBTARGET may be used as the target for computing one of EXP's operands. */
4540 static rtx
4541 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4542 rtx subtarget, optab op_optab)
4544 rtx op0;
4546 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4547 return NULL_RTX;
4549 /* Compute the argument. */
4550 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4551 (subtarget
4552 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4553 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4554 VOIDmode, EXPAND_NORMAL);
4555 /* Compute op, into TARGET if possible.
4556 Set TARGET to wherever the result comes back. */
4557 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4558 op_optab, op0, target, op_optab != clrsb_optab);
4559 gcc_assert (target);
4561 return convert_to_mode (target_mode, target, 0);
4564 /* Expand a call to __builtin_expect. We just return our argument
4565 as the builtin_expect semantic should've been already executed by
4566 tree branch prediction pass. */
4568 static rtx
4569 expand_builtin_expect (tree exp, rtx target)
4571 tree arg;
4573 if (call_expr_nargs (exp) < 2)
4574 return const0_rtx;
4575 arg = CALL_EXPR_ARG (exp, 0);
4577 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4578 /* When guessing was done, the hints should be already stripped away. */
4579 gcc_assert (!flag_guess_branch_prob
4580 || optimize == 0 || seen_error ());
4581 return target;
4584 /* Expand a call to __builtin_assume_aligned. We just return our first
4585 argument as the builtin_assume_aligned semantic should've been already
4586 executed by CCP. */
4588 static rtx
4589 expand_builtin_assume_aligned (tree exp, rtx target)
4591 if (call_expr_nargs (exp) < 2)
4592 return const0_rtx;
4593 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4594 EXPAND_NORMAL);
4595 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4596 && (call_expr_nargs (exp) < 3
4597 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4598 return target;
4601 void
4602 expand_builtin_trap (void)
4604 #ifdef HAVE_trap
4605 if (HAVE_trap)
4607 rtx insn = emit_insn (gen_trap ());
4608 /* For trap insns when not accumulating outgoing args force
4609 REG_ARGS_SIZE note to prevent crossjumping of calls with
4610 different args sizes. */
4611 if (!ACCUMULATE_OUTGOING_ARGS)
4612 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4614 else
4615 #endif
4616 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4617 emit_barrier ();
4620 /* Expand a call to __builtin_unreachable. We do nothing except emit
4621 a barrier saying that control flow will not pass here.
4623 It is the responsibility of the program being compiled to ensure
4624 that control flow does never reach __builtin_unreachable. */
4625 static void
4626 expand_builtin_unreachable (void)
4628 emit_barrier ();
4631 /* Expand EXP, a call to fabs, fabsf or fabsl.
4632 Return NULL_RTX if a normal call should be emitted rather than expanding
4633 the function inline. If convenient, the result should be placed
4634 in TARGET. SUBTARGET may be used as the target for computing
4635 the operand. */
4637 static rtx
4638 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4640 enum machine_mode mode;
4641 tree arg;
4642 rtx op0;
4644 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4645 return NULL_RTX;
4647 arg = CALL_EXPR_ARG (exp, 0);
4648 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4649 mode = TYPE_MODE (TREE_TYPE (arg));
4650 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4651 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4654 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4655 Return NULL is a normal call should be emitted rather than expanding the
4656 function inline. If convenient, the result should be placed in TARGET.
4657 SUBTARGET may be used as the target for computing the operand. */
4659 static rtx
4660 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4662 rtx op0, op1;
4663 tree arg;
4665 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4666 return NULL_RTX;
4668 arg = CALL_EXPR_ARG (exp, 0);
4669 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4671 arg = CALL_EXPR_ARG (exp, 1);
4672 op1 = expand_normal (arg);
4674 return expand_copysign (op0, op1, target);
4677 /* Expand a call to __builtin___clear_cache. */
4679 static rtx
4680 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4682 #ifndef HAVE_clear_cache
4683 #ifdef CLEAR_INSN_CACHE
4684 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4685 does something. Just do the default expansion to a call to
4686 __clear_cache(). */
4687 return NULL_RTX;
4688 #else
4689 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4690 does nothing. There is no need to call it. Do nothing. */
4691 return const0_rtx;
4692 #endif /* CLEAR_INSN_CACHE */
4693 #else
4694 /* We have a "clear_cache" insn, and it will handle everything. */
4695 tree begin, end;
4696 rtx begin_rtx, end_rtx;
4698 /* We must not expand to a library call. If we did, any
4699 fallback library function in libgcc that might contain a call to
4700 __builtin___clear_cache() would recurse infinitely. */
4701 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4703 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4704 return const0_rtx;
4707 if (HAVE_clear_cache)
4709 struct expand_operand ops[2];
4711 begin = CALL_EXPR_ARG (exp, 0);
4712 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4714 end = CALL_EXPR_ARG (exp, 1);
4715 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4717 create_address_operand (&ops[0], begin_rtx);
4718 create_address_operand (&ops[1], end_rtx);
4719 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4720 return const0_rtx;
4722 return const0_rtx;
4723 #endif /* HAVE_clear_cache */
4726 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4728 static rtx
4729 round_trampoline_addr (rtx tramp)
4731 rtx temp, addend, mask;
4733 /* If we don't need too much alignment, we'll have been guaranteed
4734 proper alignment by get_trampoline_type. */
4735 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4736 return tramp;
4738 /* Round address up to desired boundary. */
4739 temp = gen_reg_rtx (Pmode);
4740 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4741 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4743 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4744 temp, 0, OPTAB_LIB_WIDEN);
4745 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4746 temp, 0, OPTAB_LIB_WIDEN);
4748 return tramp;
4751 static rtx
4752 expand_builtin_init_trampoline (tree exp, bool onstack)
4754 tree t_tramp, t_func, t_chain;
4755 rtx m_tramp, r_tramp, r_chain, tmp;
4757 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4758 POINTER_TYPE, VOID_TYPE))
4759 return NULL_RTX;
4761 t_tramp = CALL_EXPR_ARG (exp, 0);
4762 t_func = CALL_EXPR_ARG (exp, 1);
4763 t_chain = CALL_EXPR_ARG (exp, 2);
4765 r_tramp = expand_normal (t_tramp);
4766 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4767 MEM_NOTRAP_P (m_tramp) = 1;
4769 /* If ONSTACK, the TRAMP argument should be the address of a field
4770 within the local function's FRAME decl. Either way, let's see if
4771 we can fill in the MEM_ATTRs for this memory. */
4772 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4773 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4775 /* Creator of a heap trampoline is responsible for making sure the
4776 address is aligned to at least STACK_BOUNDARY. Normally malloc
4777 will ensure this anyhow. */
4778 tmp = round_trampoline_addr (r_tramp);
4779 if (tmp != r_tramp)
4781 m_tramp = change_address (m_tramp, BLKmode, tmp);
4782 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4783 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4786 /* The FUNC argument should be the address of the nested function.
4787 Extract the actual function decl to pass to the hook. */
4788 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4789 t_func = TREE_OPERAND (t_func, 0);
4790 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4792 r_chain = expand_normal (t_chain);
4794 /* Generate insns to initialize the trampoline. */
4795 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4797 if (onstack)
4799 trampolines_created = 1;
4801 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4802 "trampoline generated for nested function %qD", t_func);
4805 return const0_rtx;
4808 static rtx
4809 expand_builtin_adjust_trampoline (tree exp)
4811 rtx tramp;
4813 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4814 return NULL_RTX;
4816 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4817 tramp = round_trampoline_addr (tramp);
4818 if (targetm.calls.trampoline_adjust_address)
4819 tramp = targetm.calls.trampoline_adjust_address (tramp);
4821 return tramp;
4824 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4825 function. The function first checks whether the back end provides
4826 an insn to implement signbit for the respective mode. If not, it
4827 checks whether the floating point format of the value is such that
4828 the sign bit can be extracted. If that is not the case, the
4829 function returns NULL_RTX to indicate that a normal call should be
4830 emitted rather than expanding the function in-line. EXP is the
4831 expression that is a call to the builtin function; if convenient,
4832 the result should be placed in TARGET. */
4833 static rtx
4834 expand_builtin_signbit (tree exp, rtx target)
4836 const struct real_format *fmt;
4837 enum machine_mode fmode, imode, rmode;
4838 tree arg;
4839 int word, bitpos;
4840 enum insn_code icode;
4841 rtx temp;
4842 location_t loc = EXPR_LOCATION (exp);
4844 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4845 return NULL_RTX;
4847 arg = CALL_EXPR_ARG (exp, 0);
4848 fmode = TYPE_MODE (TREE_TYPE (arg));
4849 rmode = TYPE_MODE (TREE_TYPE (exp));
4850 fmt = REAL_MODE_FORMAT (fmode);
4852 arg = builtin_save_expr (arg);
4854 /* Expand the argument yielding a RTX expression. */
4855 temp = expand_normal (arg);
4857 /* Check if the back end provides an insn that handles signbit for the
4858 argument's mode. */
4859 icode = optab_handler (signbit_optab, fmode);
4860 if (icode != CODE_FOR_nothing)
4862 rtx_insn *last = get_last_insn ();
4863 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4864 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4865 return target;
4866 delete_insns_since (last);
4869 /* For floating point formats without a sign bit, implement signbit
4870 as "ARG < 0.0". */
4871 bitpos = fmt->signbit_ro;
4872 if (bitpos < 0)
4874 /* But we can't do this if the format supports signed zero. */
4875 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4876 return NULL_RTX;
4878 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4879 build_real (TREE_TYPE (arg), dconst0));
4880 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4883 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4885 imode = int_mode_for_mode (fmode);
4886 if (imode == BLKmode)
4887 return NULL_RTX;
4888 temp = gen_lowpart (imode, temp);
4890 else
4892 imode = word_mode;
4893 /* Handle targets with different FP word orders. */
4894 if (FLOAT_WORDS_BIG_ENDIAN)
4895 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4896 else
4897 word = bitpos / BITS_PER_WORD;
4898 temp = operand_subword_force (temp, word, fmode);
4899 bitpos = bitpos % BITS_PER_WORD;
4902 /* Force the intermediate word_mode (or narrower) result into a
4903 register. This avoids attempting to create paradoxical SUBREGs
4904 of floating point modes below. */
4905 temp = force_reg (imode, temp);
4907 /* If the bitpos is within the "result mode" lowpart, the operation
4908 can be implement with a single bitwise AND. Otherwise, we need
4909 a right shift and an AND. */
4911 if (bitpos < GET_MODE_BITSIZE (rmode))
4913 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4915 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4916 temp = gen_lowpart (rmode, temp);
4917 temp = expand_binop (rmode, and_optab, temp,
4918 immed_wide_int_const (mask, rmode),
4919 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4921 else
4923 /* Perform a logical right shift to place the signbit in the least
4924 significant bit, then truncate the result to the desired mode
4925 and mask just this bit. */
4926 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4927 temp = gen_lowpart (rmode, temp);
4928 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4929 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4932 return temp;
4935 /* Expand fork or exec calls. TARGET is the desired target of the
4936 call. EXP is the call. FN is the
4937 identificator of the actual function. IGNORE is nonzero if the
4938 value is to be ignored. */
4940 static rtx
4941 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4943 tree id, decl;
4944 tree call;
4946 /* If we are not profiling, just call the function. */
4947 if (!profile_arc_flag)
4948 return NULL_RTX;
4950 /* Otherwise call the wrapper. This should be equivalent for the rest of
4951 compiler, so the code does not diverge, and the wrapper may run the
4952 code necessary for keeping the profiling sane. */
4954 switch (DECL_FUNCTION_CODE (fn))
4956 case BUILT_IN_FORK:
4957 id = get_identifier ("__gcov_fork");
4958 break;
4960 case BUILT_IN_EXECL:
4961 id = get_identifier ("__gcov_execl");
4962 break;
4964 case BUILT_IN_EXECV:
4965 id = get_identifier ("__gcov_execv");
4966 break;
4968 case BUILT_IN_EXECLP:
4969 id = get_identifier ("__gcov_execlp");
4970 break;
4972 case BUILT_IN_EXECLE:
4973 id = get_identifier ("__gcov_execle");
4974 break;
4976 case BUILT_IN_EXECVP:
4977 id = get_identifier ("__gcov_execvp");
4978 break;
4980 case BUILT_IN_EXECVE:
4981 id = get_identifier ("__gcov_execve");
4982 break;
4984 default:
4985 gcc_unreachable ();
4988 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4989 FUNCTION_DECL, id, TREE_TYPE (fn));
4990 DECL_EXTERNAL (decl) = 1;
4991 TREE_PUBLIC (decl) = 1;
4992 DECL_ARTIFICIAL (decl) = 1;
4993 TREE_NOTHROW (decl) = 1;
4994 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4995 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4996 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4997 return expand_call (call, target, ignore);
5002 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5003 the pointer in these functions is void*, the tree optimizers may remove
5004 casts. The mode computed in expand_builtin isn't reliable either, due
5005 to __sync_bool_compare_and_swap.
5007 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5008 group of builtins. This gives us log2 of the mode size. */
5010 static inline enum machine_mode
5011 get_builtin_sync_mode (int fcode_diff)
5013 /* The size is not negotiable, so ask not to get BLKmode in return
5014 if the target indicates that a smaller size would be better. */
5015 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5018 /* Expand the memory expression LOC and return the appropriate memory operand
5019 for the builtin_sync operations. */
5021 static rtx
5022 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5024 rtx addr, mem;
5026 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5027 addr = convert_memory_address (Pmode, addr);
5029 /* Note that we explicitly do not want any alias information for this
5030 memory, so that we kill all other live memories. Otherwise we don't
5031 satisfy the full barrier semantics of the intrinsic. */
5032 mem = validize_mem (gen_rtx_MEM (mode, addr));
5034 /* The alignment needs to be at least according to that of the mode. */
5035 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5036 get_pointer_alignment (loc)));
5037 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5038 MEM_VOLATILE_P (mem) = 1;
5040 return mem;
5043 /* Make sure an argument is in the right mode.
5044 EXP is the tree argument.
5045 MODE is the mode it should be in. */
5047 static rtx
5048 expand_expr_force_mode (tree exp, enum machine_mode mode)
5050 rtx val;
5051 enum machine_mode old_mode;
5053 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5054 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5055 of CONST_INTs, where we know the old_mode only from the call argument. */
5057 old_mode = GET_MODE (val);
5058 if (old_mode == VOIDmode)
5059 old_mode = TYPE_MODE (TREE_TYPE (exp));
5060 val = convert_modes (mode, old_mode, val, 1);
5061 return val;
5065 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5066 EXP is the CALL_EXPR. CODE is the rtx code
5067 that corresponds to the arithmetic or logical operation from the name;
5068 an exception here is that NOT actually means NAND. TARGET is an optional
5069 place for us to store the results; AFTER is true if this is the
5070 fetch_and_xxx form. */
5072 static rtx
5073 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5074 enum rtx_code code, bool after,
5075 rtx target)
5077 rtx val, mem;
5078 location_t loc = EXPR_LOCATION (exp);
5080 if (code == NOT && warn_sync_nand)
5082 tree fndecl = get_callee_fndecl (exp);
5083 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5085 static bool warned_f_a_n, warned_n_a_f;
5087 switch (fcode)
5089 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5090 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5091 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5092 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5093 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5094 if (warned_f_a_n)
5095 break;
5097 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5098 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5099 warned_f_a_n = true;
5100 break;
5102 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5103 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5104 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5105 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5106 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5107 if (warned_n_a_f)
5108 break;
5110 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5111 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5112 warned_n_a_f = true;
5113 break;
5115 default:
5116 gcc_unreachable ();
5120 /* Expand the operands. */
5121 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5122 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5124 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5125 after);
5128 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5129 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5130 true if this is the boolean form. TARGET is a place for us to store the
5131 results; this is NOT optional if IS_BOOL is true. */
5133 static rtx
5134 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5135 bool is_bool, rtx target)
5137 rtx old_val, new_val, mem;
5138 rtx *pbool, *poval;
5140 /* Expand the operands. */
5141 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5142 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5143 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5145 pbool = poval = NULL;
5146 if (target != const0_rtx)
5148 if (is_bool)
5149 pbool = &target;
5150 else
5151 poval = &target;
5153 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5154 false, MEMMODEL_SEQ_CST,
5155 MEMMODEL_SEQ_CST))
5156 return NULL_RTX;
5158 return target;
5161 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5162 general form is actually an atomic exchange, and some targets only
5163 support a reduced form with the second argument being a constant 1.
5164 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5165 the results. */
5167 static rtx
5168 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5169 rtx target)
5171 rtx val, mem;
5173 /* Expand the operands. */
5174 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5175 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5177 return expand_sync_lock_test_and_set (target, mem, val);
5180 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5182 static void
5183 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5185 rtx mem;
5187 /* Expand the operands. */
5188 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5190 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5193 /* Given an integer representing an ``enum memmodel'', verify its
5194 correctness and return the memory model enum. */
5196 static enum memmodel
5197 get_memmodel (tree exp)
5199 rtx op;
5200 unsigned HOST_WIDE_INT val;
5202 /* If the parameter is not a constant, it's a run time value so we'll just
5203 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5204 if (TREE_CODE (exp) != INTEGER_CST)
5205 return MEMMODEL_SEQ_CST;
5207 op = expand_normal (exp);
5209 val = INTVAL (op);
5210 if (targetm.memmodel_check)
5211 val = targetm.memmodel_check (val);
5212 else if (val & ~MEMMODEL_MASK)
5214 warning (OPT_Winvalid_memory_model,
5215 "Unknown architecture specifier in memory model to builtin.");
5216 return MEMMODEL_SEQ_CST;
5219 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5221 warning (OPT_Winvalid_memory_model,
5222 "invalid memory model argument to builtin");
5223 return MEMMODEL_SEQ_CST;
5226 return (enum memmodel) val;
5229 /* Expand the __atomic_exchange intrinsic:
5230 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5231 EXP is the CALL_EXPR.
5232 TARGET is an optional place for us to store the results. */
5234 static rtx
5235 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5237 rtx val, mem;
5238 enum memmodel model;
5240 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5241 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5243 error ("invalid memory model for %<__atomic_exchange%>");
5244 return NULL_RTX;
5247 if (!flag_inline_atomics)
5248 return NULL_RTX;
5250 /* Expand the operands. */
5251 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5252 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5254 return expand_atomic_exchange (target, mem, val, model);
5257 /* Expand the __atomic_compare_exchange intrinsic:
5258 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5259 TYPE desired, BOOL weak,
5260 enum memmodel success,
5261 enum memmodel failure)
5262 EXP is the CALL_EXPR.
5263 TARGET is an optional place for us to store the results. */
5265 static rtx
5266 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5267 rtx target)
5269 rtx expect, desired, mem, oldval;
5270 rtx_code_label *label;
5271 enum memmodel success, failure;
5272 tree weak;
5273 bool is_weak;
5275 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5276 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5278 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5279 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5281 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5282 return NULL_RTX;
5285 if (failure > success)
5287 error ("failure memory model cannot be stronger than success "
5288 "memory model for %<__atomic_compare_exchange%>");
5289 return NULL_RTX;
5292 if (!flag_inline_atomics)
5293 return NULL_RTX;
5295 /* Expand the operands. */
5296 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5298 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5299 expect = convert_memory_address (Pmode, expect);
5300 expect = gen_rtx_MEM (mode, expect);
5301 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5303 weak = CALL_EXPR_ARG (exp, 3);
5304 is_weak = false;
5305 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5306 is_weak = true;
5308 if (target == const0_rtx)
5309 target = NULL;
5311 /* Lest the rtl backend create a race condition with an improper store
5312 to memory, always create a new pseudo for OLDVAL. */
5313 oldval = NULL;
5315 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5316 is_weak, success, failure))
5317 return NULL_RTX;
5319 /* Conditionally store back to EXPECT, lest we create a race condition
5320 with an improper store to memory. */
5321 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5322 the normal case where EXPECT is totally private, i.e. a register. At
5323 which point the store can be unconditional. */
5324 label = gen_label_rtx ();
5325 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5326 emit_move_insn (expect, oldval);
5327 emit_label (label);
5329 return target;
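/* Editor's note: an illustrative sketch (not GCC source) of the semantics
   the conditional store-back above implements. On failure the builtin
   writes the observed value into *EXPECT, which is what makes the usual
   CAS retry loop work:

     void add_slow (int *obj, int delta)
     {
       int old = __atomic_load_n (obj, __ATOMIC_RELAXED);
       while (!__atomic_compare_exchange_n (obj, &old, old + delta, 0,
                                            __ATOMIC_SEQ_CST,
                                            __ATOMIC_RELAXED))
         ;   on failure, old was refreshed from *obj by the store-back
     }
*/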
5332 /* Expand the __atomic_load intrinsic:
5333 TYPE __atomic_load (TYPE *object, enum memmodel)
5334 EXP is the CALL_EXPR.
5335 TARGET is an optional place for us to store the results. */
5337 static rtx
5338 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5340 rtx mem;
5341 enum memmodel model;
5343 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5344 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5345 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5347 error ("invalid memory model for %<__atomic_load%>");
5348 return NULL_RTX;
5351 if (!flag_inline_atomics)
5352 return NULL_RTX;
5354 /* Expand the operand. */
5355 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5357 return expand_atomic_load (target, mem, model);
5361 /* Expand the __atomic_store intrinsic:
5362 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5363 EXP is the CALL_EXPR. */
5366 static rtx
5367 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5369 rtx mem, val;
5370 enum memmodel model;
5372 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5373 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5374 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5375 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5377 error ("invalid memory model for %<__atomic_store%>");
5378 return NULL_RTX;
5381 if (!flag_inline_atomics)
5382 return NULL_RTX;
5384 /* Expand the operands. */
5385 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5386 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5388 return expand_atomic_store (mem, val, model, false);
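/* Editor's note: an illustrative sketch (not GCC source). Only relaxed,
   release and seq_cst orders reach expand_atomic_store above; an
   acquire-flavored order is diagnosed instead:

     void publish (int *p, int v)
     {
       __atomic_store_n (p, v, __ATOMIC_RELEASE);   accepted
       __atomic_store_n (p, v, __ATOMIC_ACQUIRE);   "invalid memory model"
     }
*/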
5391 /* Expand the __atomic_fetch_XXX intrinsic:
5392 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5393 EXP is the CALL_EXPR.
5394 TARGET is an optional place for us to store the results.
5395 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for nand).
5396 FETCH_AFTER is true if the result of the operation is returned;
5397 false if the value before the operation is returned.
5398 IGNORE is true if the result is not used.
5399 EXT_CALL is the correct builtin for an external call if this cannot be
5400 resolved to an instruction sequence. */
5402 static rtx
5403 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5404 enum rtx_code code, bool fetch_after,
5405 bool ignore, enum built_in_function ext_call)
5407 rtx val, mem, ret;
5408 enum memmodel model;
5409 tree fndecl;
5410 tree addr;
5412 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5414 /* Expand the operands. */
5415 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5416 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5418 /* Only try generating instructions if inlining is turned on. */
5419 if (flag_inline_atomics)
5421 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5422 if (ret)
5423 return ret;
5427 /* If there is no substitute library routine, let the normal library call be emitted. */
5427 if (ext_call == BUILT_IN_NONE)
5428 return NULL_RTX;
5430 /* Change the call to the specified function. */
5431 fndecl = get_callee_fndecl (exp);
5432 addr = CALL_EXPR_FN (exp);
5433 STRIP_NOPS (addr);
5435 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5436 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5438 /* Expand the call here so we can emit trailing code. */
5439 ret = expand_call (exp, target, ignore);
5441 /* Restore the original function, just in case it matters later. */
5442 TREE_OPERAND (addr, 0) = fndecl;
5444 /* Then issue the arithmetic correction to return the right result. */
5445 if (!ignore)
5447 if (code == NOT)
5449 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5450 OPTAB_LIB_WIDEN);
5451 ret = expand_simple_unop (mode, NOT, ret, target, true);
5453 else
5454 ret = expand_simple_binop (mode, code, ret, val, target, true,
5455 OPTAB_LIB_WIDEN);
5457 return ret;
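/* Editor's note: a worked example (not GCC source) of the arithmetic
   correction above. When __atomic_add_fetch falls back to the library
   routine __atomic_fetch_add, the routine returns the value before the
   operation, so the fetch-after result is recovered as

     new = old + val

   and for nand (CODE == NOT) two insns are needed, matching the AND
   followed by NOT emitted above:

     new = ~(old & val)   */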
5461 #ifndef HAVE_atomic_clear
5462 # define HAVE_atomic_clear 0
5463 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5464 #endif
5466 /* Expand an atomic clear operation.
5467 void __atomic_clear (BOOL *obj, enum memmodel)
5468 EXP is the call expression. */
5470 static rtx
5471 expand_builtin_atomic_clear (tree exp)
5473 enum machine_mode mode;
5474 rtx mem, ret;
5475 enum memmodel model;
5477 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5478 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5479 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5481 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5482 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5484 error ("invalid memory model for %<__atomic_store%>");
5485 return const0_rtx;
5488 if (HAVE_atomic_clear)
5490 emit_insn (gen_atomic_clear (mem, model));
5491 return const0_rtx;
5494 /* Try issuing an __atomic_store, allowing a fallback to
5495 __sync_lock_release. If both fail, issue a plain store. The only way
5496 this can fail is if the bool type is larger than a word size. Unlikely,
5497 but handle it anyway for completeness. Assume a single threaded model
5498 in that case, since there is no atomic support and no barriers are required. */
5499 ret = expand_atomic_store (mem, const0_rtx, model, true);
5500 if (!ret)
5501 emit_move_insn (mem, const0_rtx);
5502 return const0_rtx;
5505 /* Expand an atomic test_and_set operation.
5506 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5507 EXP is the call expression. */
5509 static rtx
5510 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5512 rtx mem;
5513 enum memmodel model;
5514 enum machine_mode mode;
5516 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5517 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5518 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5520 return expand_atomic_test_and_set (target, mem, model);
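/* Editor's note: an illustrative sketch (not GCC source) showing how the
   two builtins above pair up as a minimal spinlock:

     static char lock;

     void acquire (void)
     {
       while (__atomic_test_and_set (&lock, __ATOMIC_ACQUIRE))
         ;                                  spin until we set the flag
     }

     void release (void)
     {
       __atomic_clear (&lock, __ATOMIC_RELEASE);
     }
*/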
5524 /* Return true if the object of size ARG0 pointed to by (optional) argument
5525 ARG1 is always lock free on this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5527 static tree
5528 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5530 int size;
5531 enum machine_mode mode;
5532 unsigned int mode_align, type_align;
5534 if (TREE_CODE (arg0) != INTEGER_CST)
5535 return NULL_TREE;
5537 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5538 mode = mode_for_size (size, MODE_INT, 0);
5539 mode_align = GET_MODE_ALIGNMENT (mode);
5541 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5542 type_align = mode_align;
5543 else
5545 tree ttype = TREE_TYPE (arg1);
5547 /* This function is usually invoked and folded immediately by the front
5548 end before anything else has a chance to look at it. The pointer
5549 parameter at this point is usually cast to a void *, so check for that
5550 and look past the cast. */
5551 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5552 && VOID_TYPE_P (TREE_TYPE (ttype)))
5553 arg1 = TREE_OPERAND (arg1, 0);
5555 ttype = TREE_TYPE (arg1);
5556 gcc_assert (POINTER_TYPE_P (ttype));
5558 /* Get the underlying type of the object. */
5559 ttype = TREE_TYPE (ttype);
5560 type_align = TYPE_ALIGN (ttype);
5563 /* If the object has smaller alignment, the lock free routines cannot
5564 be used. */
5565 if (type_align < mode_align)
5566 return boolean_false_node;
5568 /* Check if a compare_and_swap pattern exists for the mode which represents
5569 the required size. The pattern is not allowed to fail, so the existence
5570 of the pattern indicates support is present. */
5571 if (can_compare_and_swap_p (mode, true))
5572 return boolean_true_node;
5573 else
5574 return boolean_false_node;
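/* Editor's note: an illustrative example (not GCC source) of the folding
   above. With a null object pointer the typical alignment for the size is
   assumed, so on a target whose compare_and_swap pattern covers the
   4-byte integer mode,

     __atomic_always_lock_free (sizeof (int), 0)

   folds to true at compile time. */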
5577 /* Expand __atomic_always_lock_free: return const1_rtx if the parameters
5578 to call EXP represent an object which will always generate lock free
5579 instructions. The first argument represents the size of the object,
5580 and the second parameter is a pointer to the object itself. If NULL is
5581 passed for the object, then the result is based on typical alignment
5582 for an object of the specified size. Otherwise return const0_rtx. */
5584 static rtx
5585 expand_builtin_atomic_always_lock_free (tree exp)
5587 tree size;
5588 tree arg0 = CALL_EXPR_ARG (exp, 0);
5589 tree arg1 = CALL_EXPR_ARG (exp, 1);
5591 if (TREE_CODE (arg0) != INTEGER_CST)
5593 error ("non-constant argument 1 to __atomic_always_lock_free");
5594 return const0_rtx;
5597 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5598 if (size == boolean_true_node)
5599 return const1_rtx;
5600 return const0_rtx;
5603 /* Return boolean_true_node if it can be determined that the object of size
5604 ARG0 pointed to by ARG1 is always lock free on this architecture; return NULL_TREE otherwise. */
5606 static tree
5607 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5609 if (!flag_inline_atomics)
5610 return NULL_TREE;
5612 /* If it isn't always lock free, don't generate a result. */
5613 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5614 return boolean_true_node;
5616 return NULL_TREE;
5619 /* Expand __atomic_is_lock_free: return const1_rtx if the parameters to
5620 call EXP represent an object which will always generate lock free
5621 instructions. The first argument represents the size of the object,
5622 and the second parameter is a pointer to the object itself. If NULL is
5623 passed for the object, then the result is based on typical alignment
5624 for an object of the specified size. Otherwise return NULL_RTX. */
5626 static rtx
5627 expand_builtin_atomic_is_lock_free (tree exp)
5629 tree size;
5630 tree arg0 = CALL_EXPR_ARG (exp, 0);
5631 tree arg1 = CALL_EXPR_ARG (exp, 1);
5633 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5635 error ("non-integer argument 1 to __atomic_is_lock_free");
5636 return NULL_RTX;
5639 if (!flag_inline_atomics)
5640 return NULL_RTX;
5642 /* If the value is known at compile time, return the RTX for it. */
5643 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5644 if (size == boolean_true_node)
5645 return const1_rtx;
5647 return NULL_RTX;
5650 /* Expand the __atomic_thread_fence intrinsic:
5651 void __atomic_thread_fence (enum memmodel)
5652 EXP is the CALL_EXPR. */
5654 static void
5655 expand_builtin_atomic_thread_fence (tree exp)
5657 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5658 expand_mem_thread_fence (model);
5661 /* Expand the __atomic_signal_fence intrinsic:
5662 void __atomic_signal_fence (enum memmodel)
5663 EXP is the CALL_EXPR. */
5665 static void
5666 expand_builtin_atomic_signal_fence (tree exp)
5668 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5669 expand_mem_signal_fence (model);
5672 /* Expand the __sync_synchronize intrinsic. */
5674 static void
5675 expand_builtin_sync_synchronize (void)
5677 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5680 static rtx
5681 expand_builtin_thread_pointer (tree exp, rtx target)
5683 enum insn_code icode;
5684 if (!validate_arglist (exp, VOID_TYPE))
5685 return const0_rtx;
5686 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5687 if (icode != CODE_FOR_nothing)
5689 struct expand_operand op;
5690 /* If the target is not suitable, then create a new target. */
5691 if (target == NULL_RTX
5692 || !REG_P (target)
5693 || GET_MODE (target) != Pmode)
5694 target = gen_reg_rtx (Pmode);
5695 create_output_operand (&op, target, Pmode);
5696 expand_insn (icode, 1, &op);
5697 return target;
5699 error ("__builtin_thread_pointer is not supported on this target");
5700 return const0_rtx;
5703 static void
5704 expand_builtin_set_thread_pointer (tree exp)
5706 enum insn_code icode;
5707 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5708 return;
5709 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5710 if (icode != CODE_FOR_nothing)
5712 struct expand_operand op;
5713 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5714 Pmode, EXPAND_NORMAL);
5715 create_input_operand (&op, val, Pmode);
5716 expand_insn (icode, 1, &op);
5717 return;
5719 error ("__builtin_set_thread_pointer is not supported on this target");
5723 /* Emit code to restore the current value of the stack. */
5725 static void
5726 expand_stack_restore (tree var)
5728 rtx_insn *prev;
5729 rtx sa = expand_normal (var);
5731 sa = convert_memory_address (Pmode, sa);
5733 prev = get_last_insn ();
5734 emit_stack_restore (SAVE_BLOCK, sa);
5735 fixup_args_size_notes (prev, get_last_insn (), 0);
5739 /* Emit code to save the current value of the stack. */
5741 static rtx
5742 expand_stack_save (void)
5744 rtx ret = NULL_RTX;
5746 do_pending_stack_adjust ();
5747 emit_stack_save (SAVE_BLOCK, &ret);
5748 return ret;
5751 /* Expand an expression EXP that calls a built-in function,
5752 with result going to TARGET if that's convenient
5753 (and in mode MODE if that's convenient).
5754 SUBTARGET may be used as the target for computing one of EXP's operands.
5755 IGNORE is nonzero if the value is to be ignored. */
5758 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5759 int ignore)
5761 tree fndecl = get_callee_fndecl (exp);
5762 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5763 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5764 int flags;
5766 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5767 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5769 /* When not optimizing, generate calls to library functions for a certain
5770 set of builtins. */
5771 if (!optimize
5772 && !called_as_built_in (fndecl)
5773 && fcode != BUILT_IN_FORK
5774 && fcode != BUILT_IN_EXECL
5775 && fcode != BUILT_IN_EXECV
5776 && fcode != BUILT_IN_EXECLP
5777 && fcode != BUILT_IN_EXECLE
5778 && fcode != BUILT_IN_EXECVP
5779 && fcode != BUILT_IN_EXECVE
5780 && fcode != BUILT_IN_ALLOCA
5781 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5782 && fcode != BUILT_IN_FREE)
5783 return expand_call (exp, target, ignore);
5785 /* The built-in function expanders test for target == const0_rtx
5786 to determine whether the function's result will be ignored. */
5787 if (ignore)
5788 target = const0_rtx;
5790 /* If the result of a pure or const built-in function is ignored, and
5791 none of its arguments are volatile, we can avoid expanding the
5792 built-in call and just evaluate the arguments for side-effects. */
5793 if (target == const0_rtx
5794 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5795 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5797 bool volatilep = false;
5798 tree arg;
5799 call_expr_arg_iterator iter;
5801 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5802 if (TREE_THIS_VOLATILE (arg))
5804 volatilep = true;
5805 break;
5808 if (! volatilep)
5810 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5811 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5812 return const0_rtx;
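/* Editor's note: an illustrative example (not GCC source) of the shortcut
   above. A const builtin whose result is ignored reduces to evaluating
   its arguments for their side effects only:

     extern int idx;
     (void) __builtin_ffs (idx++);   only the increment of idx survives
*/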
5816 switch (fcode)
5818 CASE_FLT_FN (BUILT_IN_FABS):
5819 case BUILT_IN_FABSD32:
5820 case BUILT_IN_FABSD64:
5821 case BUILT_IN_FABSD128:
5822 target = expand_builtin_fabs (exp, target, subtarget);
5823 if (target)
5824 return target;
5825 break;
5827 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5828 target = expand_builtin_copysign (exp, target, subtarget);
5829 if (target)
5830 return target;
5831 break;
5833 /* Just do a normal library call if we were unable to fold
5834 the values. */
5835 CASE_FLT_FN (BUILT_IN_CABS):
5836 break;
5838 CASE_FLT_FN (BUILT_IN_EXP):
5839 CASE_FLT_FN (BUILT_IN_EXP10):
5840 CASE_FLT_FN (BUILT_IN_POW10):
5841 CASE_FLT_FN (BUILT_IN_EXP2):
5842 CASE_FLT_FN (BUILT_IN_EXPM1):
5843 CASE_FLT_FN (BUILT_IN_LOGB):
5844 CASE_FLT_FN (BUILT_IN_LOG):
5845 CASE_FLT_FN (BUILT_IN_LOG10):
5846 CASE_FLT_FN (BUILT_IN_LOG2):
5847 CASE_FLT_FN (BUILT_IN_LOG1P):
5848 CASE_FLT_FN (BUILT_IN_TAN):
5849 CASE_FLT_FN (BUILT_IN_ASIN):
5850 CASE_FLT_FN (BUILT_IN_ACOS):
5851 CASE_FLT_FN (BUILT_IN_ATAN):
5852 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5853 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5854 because of possible accuracy problems. */
5855 if (! flag_unsafe_math_optimizations)
5856 break;
5857 CASE_FLT_FN (BUILT_IN_SQRT):
5858 CASE_FLT_FN (BUILT_IN_FLOOR):
5859 CASE_FLT_FN (BUILT_IN_CEIL):
5860 CASE_FLT_FN (BUILT_IN_TRUNC):
5861 CASE_FLT_FN (BUILT_IN_ROUND):
5862 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5863 CASE_FLT_FN (BUILT_IN_RINT):
5864 target = expand_builtin_mathfn (exp, target, subtarget);
5865 if (target)
5866 return target;
5867 break;
5869 CASE_FLT_FN (BUILT_IN_FMA):
5870 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5871 if (target)
5872 return target;
5873 break;
5875 CASE_FLT_FN (BUILT_IN_ILOGB):
5876 if (! flag_unsafe_math_optimizations)
5877 break;
5878 CASE_FLT_FN (BUILT_IN_ISINF):
5879 CASE_FLT_FN (BUILT_IN_FINITE):
5880 case BUILT_IN_ISFINITE:
5881 case BUILT_IN_ISNORMAL:
5882 target = expand_builtin_interclass_mathfn (exp, target);
5883 if (target)
5884 return target;
5885 break;
5887 CASE_FLT_FN (BUILT_IN_ICEIL):
5888 CASE_FLT_FN (BUILT_IN_LCEIL):
5889 CASE_FLT_FN (BUILT_IN_LLCEIL):
5890 CASE_FLT_FN (BUILT_IN_LFLOOR):
5891 CASE_FLT_FN (BUILT_IN_IFLOOR):
5892 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5893 target = expand_builtin_int_roundingfn (exp, target);
5894 if (target)
5895 return target;
5896 break;
5898 CASE_FLT_FN (BUILT_IN_IRINT):
5899 CASE_FLT_FN (BUILT_IN_LRINT):
5900 CASE_FLT_FN (BUILT_IN_LLRINT):
5901 CASE_FLT_FN (BUILT_IN_IROUND):
5902 CASE_FLT_FN (BUILT_IN_LROUND):
5903 CASE_FLT_FN (BUILT_IN_LLROUND):
5904 target = expand_builtin_int_roundingfn_2 (exp, target);
5905 if (target)
5906 return target;
5907 break;
5909 CASE_FLT_FN (BUILT_IN_POWI):
5910 target = expand_builtin_powi (exp, target);
5911 if (target)
5912 return target;
5913 break;
5915 CASE_FLT_FN (BUILT_IN_ATAN2):
5916 CASE_FLT_FN (BUILT_IN_LDEXP):
5917 CASE_FLT_FN (BUILT_IN_SCALB):
5918 CASE_FLT_FN (BUILT_IN_SCALBN):
5919 CASE_FLT_FN (BUILT_IN_SCALBLN):
5920 if (! flag_unsafe_math_optimizations)
5921 break;
5923 CASE_FLT_FN (BUILT_IN_FMOD):
5924 CASE_FLT_FN (BUILT_IN_REMAINDER):
5925 CASE_FLT_FN (BUILT_IN_DREM):
5926 CASE_FLT_FN (BUILT_IN_POW):
5927 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5928 if (target)
5929 return target;
5930 break;
5932 CASE_FLT_FN (BUILT_IN_CEXPI):
5933 target = expand_builtin_cexpi (exp, target);
5934 gcc_assert (target);
5935 return target;
5937 CASE_FLT_FN (BUILT_IN_SIN):
5938 CASE_FLT_FN (BUILT_IN_COS):
5939 if (! flag_unsafe_math_optimizations)
5940 break;
5941 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5942 if (target)
5943 return target;
5944 break;
5946 CASE_FLT_FN (BUILT_IN_SINCOS):
5947 if (! flag_unsafe_math_optimizations)
5948 break;
5949 target = expand_builtin_sincos (exp);
5950 if (target)
5951 return target;
5952 break;
5954 case BUILT_IN_APPLY_ARGS:
5955 return expand_builtin_apply_args ();
5957 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5958 FUNCTION with a copy of the parameters described by
5959 ARGUMENTS, and ARGSIZE. It returns a block of memory
5960 allocated on the stack into which is stored all the registers
5961 that might possibly be used for returning the result of a
5962 function. ARGUMENTS is the value returned by
5963 __builtin_apply_args. ARGSIZE is the number of bytes of
5964 arguments that must be copied. ??? How should this value be
5965 computed? We'll also need a safe worst case value for varargs
5966 functions. */
5967 case BUILT_IN_APPLY:
5968 if (!validate_arglist (exp, POINTER_TYPE,
5969 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5970 && !validate_arglist (exp, REFERENCE_TYPE,
5971 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5972 return const0_rtx;
5973 else
5975 rtx ops[3];
5977 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5978 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5979 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5981 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5984 /* __builtin_return (RESULT) causes the function to return the
5985 value described by RESULT. RESULT is address of the block of
5986 memory returned by __builtin_apply. */
5987 case BUILT_IN_RETURN:
5988 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5989 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5990 return const0_rtx;
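/* Editor's note: an illustrative sketch (not GCC source) of the
   call-forwarding idiom the two cases above implement; target_fn and the
   worst-case argument size of 64 bytes are placeholders:

     extern int target_fn ();

     int forward (void)
     {
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (res);
     }
*/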
5992 case BUILT_IN_SAVEREGS:
5993 return expand_builtin_saveregs ();
5995 case BUILT_IN_VA_ARG_PACK:
5996 /* All valid uses of __builtin_va_arg_pack () are removed during
5997 inlining. */
5998 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5999 return const0_rtx;
6001 case BUILT_IN_VA_ARG_PACK_LEN:
6002 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6003 inlining. */
6004 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6005 return const0_rtx;
6007 /* Return the address of the first anonymous stack arg. */
6008 case BUILT_IN_NEXT_ARG:
6009 if (fold_builtin_next_arg (exp, false))
6010 return const0_rtx;
6011 return expand_builtin_next_arg ();
6013 case BUILT_IN_CLEAR_CACHE:
6014 target = expand_builtin___clear_cache (exp);
6015 if (target)
6016 return target;
6017 break;
6019 case BUILT_IN_CLASSIFY_TYPE:
6020 return expand_builtin_classify_type (exp);
6022 case BUILT_IN_CONSTANT_P:
6023 return const0_rtx;
6025 case BUILT_IN_FRAME_ADDRESS:
6026 case BUILT_IN_RETURN_ADDRESS:
6027 return expand_builtin_frame_address (fndecl, exp);
6029 /* Returns the address of the area where the structure is returned.
6030 0 otherwise. */
6031 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6032 if (call_expr_nargs (exp) != 0
6033 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6034 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6035 return const0_rtx;
6036 else
6037 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6039 case BUILT_IN_ALLOCA:
6040 case BUILT_IN_ALLOCA_WITH_ALIGN:
6041 /* If the allocation stems from the declaration of a variable-sized
6042 object, it cannot accumulate. */
6043 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6044 if (target)
6045 return target;
6046 break;
6048 case BUILT_IN_STACK_SAVE:
6049 return expand_stack_save ();
6051 case BUILT_IN_STACK_RESTORE:
6052 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6053 return const0_rtx;
6055 case BUILT_IN_BSWAP16:
6056 case BUILT_IN_BSWAP32:
6057 case BUILT_IN_BSWAP64:
6058 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6059 if (target)
6060 return target;
6061 break;
6063 CASE_INT_FN (BUILT_IN_FFS):
6064 target = expand_builtin_unop (target_mode, exp, target,
6065 subtarget, ffs_optab);
6066 if (target)
6067 return target;
6068 break;
6070 CASE_INT_FN (BUILT_IN_CLZ):
6071 target = expand_builtin_unop (target_mode, exp, target,
6072 subtarget, clz_optab);
6073 if (target)
6074 return target;
6075 break;
6077 CASE_INT_FN (BUILT_IN_CTZ):
6078 target = expand_builtin_unop (target_mode, exp, target,
6079 subtarget, ctz_optab);
6080 if (target)
6081 return target;
6082 break;
6084 CASE_INT_FN (BUILT_IN_CLRSB):
6085 target = expand_builtin_unop (target_mode, exp, target,
6086 subtarget, clrsb_optab);
6087 if (target)
6088 return target;
6089 break;
6091 CASE_INT_FN (BUILT_IN_POPCOUNT):
6092 target = expand_builtin_unop (target_mode, exp, target,
6093 subtarget, popcount_optab);
6094 if (target)
6095 return target;
6096 break;
6098 CASE_INT_FN (BUILT_IN_PARITY):
6099 target = expand_builtin_unop (target_mode, exp, target,
6100 subtarget, parity_optab);
6101 if (target)
6102 return target;
6103 break;
6105 case BUILT_IN_STRLEN:
6106 target = expand_builtin_strlen (exp, target, target_mode);
6107 if (target)
6108 return target;
6109 break;
6111 case BUILT_IN_STRCPY:
6112 target = expand_builtin_strcpy (exp, target);
6113 if (target)
6114 return target;
6115 break;
6117 case BUILT_IN_STRNCPY:
6118 target = expand_builtin_strncpy (exp, target);
6119 if (target)
6120 return target;
6121 break;
6123 case BUILT_IN_STPCPY:
6124 target = expand_builtin_stpcpy (exp, target, mode);
6125 if (target)
6126 return target;
6127 break;
6129 case BUILT_IN_MEMCPY:
6130 target = expand_builtin_memcpy (exp, target);
6131 if (target)
6132 return target;
6133 break;
6135 case BUILT_IN_MEMPCPY:
6136 target = expand_builtin_mempcpy (exp, target, mode);
6137 if (target)
6138 return target;
6139 break;
6141 case BUILT_IN_MEMSET:
6142 target = expand_builtin_memset (exp, target, mode);
6143 if (target)
6144 return target;
6145 break;
6147 case BUILT_IN_BZERO:
6148 target = expand_builtin_bzero (exp);
6149 if (target)
6150 return target;
6151 break;
6153 case BUILT_IN_STRCMP:
6154 target = expand_builtin_strcmp (exp, target);
6155 if (target)
6156 return target;
6157 break;
6159 case BUILT_IN_STRNCMP:
6160 target = expand_builtin_strncmp (exp, target, mode);
6161 if (target)
6162 return target;
6163 break;
6165 case BUILT_IN_BCMP:
6166 case BUILT_IN_MEMCMP:
6167 target = expand_builtin_memcmp (exp, target, mode);
6168 if (target)
6169 return target;
6170 break;
6172 case BUILT_IN_SETJMP:
6173 /* This should have been lowered to the builtins below. */
6174 gcc_unreachable ();
6176 case BUILT_IN_SETJMP_SETUP:
6177 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6178 and the receiver label. */
6179 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6181 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6182 VOIDmode, EXPAND_NORMAL);
6183 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6184 rtx label_r = label_rtx (label);
6186 /* This is copied from the handling of non-local gotos. */
6187 expand_builtin_setjmp_setup (buf_addr, label_r);
6188 nonlocal_goto_handler_labels
6189 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6190 nonlocal_goto_handler_labels);
6191 /* ??? Do not let expand_label treat us as such since we would
6192 not want to be both on the list of non-local labels and on
6193 the list of forced labels. */
6194 FORCED_LABEL (label) = 0;
6195 return const0_rtx;
6197 break;
6199 case BUILT_IN_SETJMP_RECEIVER:
6200 /* __builtin_setjmp_receiver is passed the receiver label. */
6201 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6203 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6204 rtx label_r = label_rtx (label);
6206 expand_builtin_setjmp_receiver (label_r);
6207 return const0_rtx;
6209 break;
6211 /* __builtin_longjmp is passed a pointer to an array of five words.
6212 It's similar to the C library longjmp function but works with
6213 __builtin_setjmp above. */
6214 case BUILT_IN_LONGJMP:
6215 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6217 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6218 VOIDmode, EXPAND_NORMAL);
6219 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6221 if (value != const1_rtx)
6223 error ("%<__builtin_longjmp%> second argument must be 1");
6224 return const0_rtx;
6227 expand_builtin_longjmp (buf_addr, value);
6228 return const0_rtx;
6230 break;
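/* Editor's note: an illustrative sketch (not GCC source) of the
   __builtin_setjmp / __builtin_longjmp pairing handled above; note that
   the second longjmp argument must be the literal 1, as enforced by the
   error check:

     intptr_t buf[5];

     void unwind (void) { __builtin_longjmp (buf, 1); }

     int try_it (void)
     {
       if (__builtin_setjmp (buf))
         return 1;        resumed here via __builtin_longjmp
       unwind ();
       return 0;          never reached
     }
*/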
6232 case BUILT_IN_NONLOCAL_GOTO:
6233 target = expand_builtin_nonlocal_goto (exp);
6234 if (target)
6235 return target;
6236 break;
6238 /* This updates the setjmp buffer that is its argument with the value
6239 of the current stack pointer. */
6240 case BUILT_IN_UPDATE_SETJMP_BUF:
6241 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6243 rtx buf_addr
6244 = expand_normal (CALL_EXPR_ARG (exp, 0));
6246 expand_builtin_update_setjmp_buf (buf_addr);
6247 return const0_rtx;
6249 break;
6251 case BUILT_IN_TRAP:
6252 expand_builtin_trap ();
6253 return const0_rtx;
6255 case BUILT_IN_UNREACHABLE:
6256 expand_builtin_unreachable ();
6257 return const0_rtx;
6259 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6260 case BUILT_IN_SIGNBITD32:
6261 case BUILT_IN_SIGNBITD64:
6262 case BUILT_IN_SIGNBITD128:
6263 target = expand_builtin_signbit (exp, target);
6264 if (target)
6265 return target;
6266 break;
6268 /* Various hooks for the DWARF 2 __throw routine. */
6269 case BUILT_IN_UNWIND_INIT:
6270 expand_builtin_unwind_init ();
6271 return const0_rtx;
6272 case BUILT_IN_DWARF_CFA:
6273 return virtual_cfa_rtx;
6274 #ifdef DWARF2_UNWIND_INFO
6275 case BUILT_IN_DWARF_SP_COLUMN:
6276 return expand_builtin_dwarf_sp_column ();
6277 case BUILT_IN_INIT_DWARF_REG_SIZES:
6278 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6279 return const0_rtx;
6280 #endif
6281 case BUILT_IN_FROB_RETURN_ADDR:
6282 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6283 case BUILT_IN_EXTRACT_RETURN_ADDR:
6284 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6285 case BUILT_IN_EH_RETURN:
6286 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6287 CALL_EXPR_ARG (exp, 1));
6288 return const0_rtx;
6289 #ifdef EH_RETURN_DATA_REGNO
6290 case BUILT_IN_EH_RETURN_DATA_REGNO:
6291 return expand_builtin_eh_return_data_regno (exp);
6292 #endif
6293 case BUILT_IN_EXTEND_POINTER:
6294 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6295 case BUILT_IN_EH_POINTER:
6296 return expand_builtin_eh_pointer (exp);
6297 case BUILT_IN_EH_FILTER:
6298 return expand_builtin_eh_filter (exp);
6299 case BUILT_IN_EH_COPY_VALUES:
6300 return expand_builtin_eh_copy_values (exp);
6302 case BUILT_IN_VA_START:
6303 return expand_builtin_va_start (exp);
6304 case BUILT_IN_VA_END:
6305 return expand_builtin_va_end (exp);
6306 case BUILT_IN_VA_COPY:
6307 return expand_builtin_va_copy (exp);
6308 case BUILT_IN_EXPECT:
6309 return expand_builtin_expect (exp, target);
6310 case BUILT_IN_ASSUME_ALIGNED:
6311 return expand_builtin_assume_aligned (exp, target);
6312 case BUILT_IN_PREFETCH:
6313 expand_builtin_prefetch (exp);
6314 return const0_rtx;
6316 case BUILT_IN_INIT_TRAMPOLINE:
6317 return expand_builtin_init_trampoline (exp, true);
6318 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6319 return expand_builtin_init_trampoline (exp, false);
6320 case BUILT_IN_ADJUST_TRAMPOLINE:
6321 return expand_builtin_adjust_trampoline (exp);
6323 case BUILT_IN_FORK:
6324 case BUILT_IN_EXECL:
6325 case BUILT_IN_EXECV:
6326 case BUILT_IN_EXECLP:
6327 case BUILT_IN_EXECLE:
6328 case BUILT_IN_EXECVP:
6329 case BUILT_IN_EXECVE:
6330 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6331 if (target)
6332 return target;
6333 break;
6335 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6336 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6337 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6338 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6339 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6340 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6341 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6342 if (target)
6343 return target;
6344 break;
6346 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6347 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6348 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6349 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6350 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6351 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6352 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6353 if (target)
6354 return target;
6355 break;
6357 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6358 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6359 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6360 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6361 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6362 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6363 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6364 if (target)
6365 return target;
6366 break;
6368 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6369 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6370 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6371 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6372 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6373 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6374 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6375 if (target)
6376 return target;
6377 break;
6379 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6380 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6381 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6382 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6383 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6384 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6385 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6386 if (target)
6387 return target;
6388 break;
6390 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6391 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6392 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6393 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6394 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6395 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6396 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6397 if (target)
6398 return target;
6399 break;
6401 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6402 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6403 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6404 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6405 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6406 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6407 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6408 if (target)
6409 return target;
6410 break;
6412 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6413 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6414 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6415 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6416 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6417 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6418 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6419 if (target)
6420 return target;
6421 break;
6423 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6424 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6425 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6426 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6427 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6428 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6429 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6430 if (target)
6431 return target;
6432 break;
6434 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6435 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6436 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6437 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6438 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6439 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6440 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6441 if (target)
6442 return target;
6443 break;
6445 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6446 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6447 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6448 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6449 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6450 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6451 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6452 if (target)
6453 return target;
6454 break;
6456 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6457 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6458 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6459 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6460 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6461 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6462 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6463 if (target)
6464 return target;
6465 break;
6467 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6468 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6469 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6470 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6471 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6472 if (mode == VOIDmode)
6473 mode = TYPE_MODE (boolean_type_node);
6474 if (!target || !register_operand (target, mode))
6475 target = gen_reg_rtx (mode);
6477 mode = get_builtin_sync_mode
6478 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6479 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6480 if (target)
6481 return target;
6482 break;
6484 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6485 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6486 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6487 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6488 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6489 mode = get_builtin_sync_mode
6490 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6491 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6492 if (target)
6493 return target;
6494 break;
6496 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6497 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6498 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6499 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6500 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6501 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6502 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6503 if (target)
6504 return target;
6505 break;
6507 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6508 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6509 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6510 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6511 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6512 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6513 expand_builtin_sync_lock_release (mode, exp);
6514 return const0_rtx;
6516 case BUILT_IN_SYNC_SYNCHRONIZE:
6517 expand_builtin_sync_synchronize ();
6518 return const0_rtx;
6520 case BUILT_IN_ATOMIC_EXCHANGE_1:
6521 case BUILT_IN_ATOMIC_EXCHANGE_2:
6522 case BUILT_IN_ATOMIC_EXCHANGE_4:
6523 case BUILT_IN_ATOMIC_EXCHANGE_8:
6524 case BUILT_IN_ATOMIC_EXCHANGE_16:
6525 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6526 target = expand_builtin_atomic_exchange (mode, exp, target);
6527 if (target)
6528 return target;
6529 break;
6531 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6532 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6533 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6534 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6535 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6537 unsigned int nargs, z;
6538 vec<tree, va_gc> *vec;
6540 mode =
6541 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6542 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6543 if (target)
6544 return target;
6546 /* If this is turned into an external library call, the weak parameter
6547 must be dropped to match the expected parameter list. */
6548 nargs = call_expr_nargs (exp);
6549 vec_alloc (vec, nargs - 1);
6550 for (z = 0; z < 3; z++)
6551 vec->quick_push (CALL_EXPR_ARG (exp, z));
6552 /* Skip the boolean weak parameter. */
6553 for (z = 4; z < 6; z++)
6554 vec->quick_push (CALL_EXPR_ARG (exp, z));
6555 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6556 break;
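/* Editor's note (illustrative, not GCC source): the call rebuilt above
   drops only the compile-time weak flag, turning the six-argument builtin
   into the five-argument libatomic entry point, conceptually:

     __atomic_compare_exchange_4 (obj, expect, desired, weak, succ, fail)
       -> __atomic_compare_exchange_4 (obj, expect, desired, succ, fail)
*/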
6559 case BUILT_IN_ATOMIC_LOAD_1:
6560 case BUILT_IN_ATOMIC_LOAD_2:
6561 case BUILT_IN_ATOMIC_LOAD_4:
6562 case BUILT_IN_ATOMIC_LOAD_8:
6563 case BUILT_IN_ATOMIC_LOAD_16:
6564 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6565 target = expand_builtin_atomic_load (mode, exp, target);
6566 if (target)
6567 return target;
6568 break;
6570 case BUILT_IN_ATOMIC_STORE_1:
6571 case BUILT_IN_ATOMIC_STORE_2:
6572 case BUILT_IN_ATOMIC_STORE_4:
6573 case BUILT_IN_ATOMIC_STORE_8:
6574 case BUILT_IN_ATOMIC_STORE_16:
6575 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6576 target = expand_builtin_atomic_store (mode, exp);
6577 if (target)
6578 return const0_rtx;
6579 break;
6581 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6582 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6583 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6584 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6585 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6587 enum built_in_function lib;
6588 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6589 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6590 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6591 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6592 ignore, lib);
6593 if (target)
6594 return target;
6595 break;
6597 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6598 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6599 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6600 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6601 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6603 enum built_in_function lib;
6604 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6605 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6606 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6607 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6608 ignore, lib);
6609 if (target)
6610 return target;
6611 break;
6613 case BUILT_IN_ATOMIC_AND_FETCH_1:
6614 case BUILT_IN_ATOMIC_AND_FETCH_2:
6615 case BUILT_IN_ATOMIC_AND_FETCH_4:
6616 case BUILT_IN_ATOMIC_AND_FETCH_8:
6617 case BUILT_IN_ATOMIC_AND_FETCH_16:
6619 enum built_in_function lib;
6620 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6621 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6622 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6623 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6624 ignore, lib);
6625 if (target)
6626 return target;
6627 break;
6629 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6630 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6631 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6632 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6633 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6635 enum built_in_function lib;
6636 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6637 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6638 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6639 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6640 ignore, lib);
6641 if (target)
6642 return target;
6643 break;
6645 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6646 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6647 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6648 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6649 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6651 enum built_in_function lib;
6652 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6653 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6654 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6655 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6656 ignore, lib);
6657 if (target)
6658 return target;
6659 break;
6661 case BUILT_IN_ATOMIC_OR_FETCH_1:
6662 case BUILT_IN_ATOMIC_OR_FETCH_2:
6663 case BUILT_IN_ATOMIC_OR_FETCH_4:
6664 case BUILT_IN_ATOMIC_OR_FETCH_8:
6665 case BUILT_IN_ATOMIC_OR_FETCH_16:
6667 enum built_in_function lib;
6668 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6669 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6670 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6671 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6672 ignore, lib);
6673 if (target)
6674 return target;
6675 break;
6677 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6678 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6679 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6680 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6681 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6682 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6683 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6684 ignore, BUILT_IN_NONE);
6685 if (target)
6686 return target;
6687 break;
6689 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6690 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6691 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6692 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6693 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6694 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6695 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6696 ignore, BUILT_IN_NONE);
6697 if (target)
6698 return target;
6699 break;
6701 case BUILT_IN_ATOMIC_FETCH_AND_1:
6702 case BUILT_IN_ATOMIC_FETCH_AND_2:
6703 case BUILT_IN_ATOMIC_FETCH_AND_4:
6704 case BUILT_IN_ATOMIC_FETCH_AND_8:
6705 case BUILT_IN_ATOMIC_FETCH_AND_16:
6706 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6707 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6708 ignore, BUILT_IN_NONE);
6709 if (target)
6710 return target;
6711 break;
6713 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6714 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6715 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6716 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6717 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6718 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6719 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6720 ignore, BUILT_IN_NONE);
6721 if (target)
6722 return target;
6723 break;
6725 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6726 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6727 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6728 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6729 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6730 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6731 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6732 ignore, BUILT_IN_NONE);
6733 if (target)
6734 return target;
6735 break;
6737 case BUILT_IN_ATOMIC_FETCH_OR_1:
6738 case BUILT_IN_ATOMIC_FETCH_OR_2:
6739 case BUILT_IN_ATOMIC_FETCH_OR_4:
6740 case BUILT_IN_ATOMIC_FETCH_OR_8:
6741 case BUILT_IN_ATOMIC_FETCH_OR_16:
6742 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6743 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6744 ignore, BUILT_IN_NONE);
6745 if (target)
6746 return target;
6747 break;
6749 case BUILT_IN_ATOMIC_TEST_AND_SET:
6750 return expand_builtin_atomic_test_and_set (exp, target);
6752 case BUILT_IN_ATOMIC_CLEAR:
6753 return expand_builtin_atomic_clear (exp);
6755 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6756 return expand_builtin_atomic_always_lock_free (exp);
6758 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6759 target = expand_builtin_atomic_is_lock_free (exp);
6760 if (target)
6761 return target;
6762 break;
6764 case BUILT_IN_ATOMIC_THREAD_FENCE:
6765 expand_builtin_atomic_thread_fence (exp);
6766 return const0_rtx;
6768 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6769 expand_builtin_atomic_signal_fence (exp);
6770 return const0_rtx;
6772 case BUILT_IN_OBJECT_SIZE:
6773 return expand_builtin_object_size (exp);
6775 case BUILT_IN_MEMCPY_CHK:
6776 case BUILT_IN_MEMPCPY_CHK:
6777 case BUILT_IN_MEMMOVE_CHK:
6778 case BUILT_IN_MEMSET_CHK:
6779 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6780 if (target)
6781 return target;
6782 break;
6784 case BUILT_IN_STRCPY_CHK:
6785 case BUILT_IN_STPCPY_CHK:
6786 case BUILT_IN_STRNCPY_CHK:
6787 case BUILT_IN_STPNCPY_CHK:
6788 case BUILT_IN_STRCAT_CHK:
6789 case BUILT_IN_STRNCAT_CHK:
6790 case BUILT_IN_SNPRINTF_CHK:
6791 case BUILT_IN_VSNPRINTF_CHK:
6792 maybe_emit_chk_warning (exp, fcode);
6793 break;
6795 case BUILT_IN_SPRINTF_CHK:
6796 case BUILT_IN_VSPRINTF_CHK:
6797 maybe_emit_sprintf_chk_warning (exp, fcode);
6798 break;
6800 case BUILT_IN_FREE:
6801 if (warn_free_nonheap_object)
6802 maybe_emit_free_warning (exp);
6803 break;
6805 case BUILT_IN_THREAD_POINTER:
6806 return expand_builtin_thread_pointer (exp, target);
6808 case BUILT_IN_SET_THREAD_POINTER:
6809 expand_builtin_set_thread_pointer (exp);
6810 return const0_rtx;
6812 case BUILT_IN_CILK_DETACH:
6813 expand_builtin_cilk_detach (exp);
6814 return const0_rtx;
6816 case BUILT_IN_CILK_POP_FRAME:
6817 expand_builtin_cilk_pop_frame (exp);
6818 return const0_rtx;
6820 default: /* just do library call, if unknown builtin */
6821 break;
6824 /* The switch statement above can drop through to cause the function
6825 to be called normally. */
6826 return expand_call (exp, target, ignore);
6829 /* Determine whether a tree node represents a call to a built-in
6830 function. If the tree T is a call to a built-in function with
6831 the right number of arguments of the appropriate types, return
6832 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6833 Otherwise the return value is END_BUILTINS. */
6835 enum built_in_function
6836 builtin_mathfn_code (const_tree t)
6838 const_tree fndecl, arg, parmlist;
6839 const_tree argtype, parmtype;
6840 const_call_expr_arg_iterator iter;
6842 if (TREE_CODE (t) != CALL_EXPR
6843 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6844 return END_BUILTINS;
6846 fndecl = get_callee_fndecl (t);
6847 if (fndecl == NULL_TREE
6848 || TREE_CODE (fndecl) != FUNCTION_DECL
6849 || ! DECL_BUILT_IN (fndecl)
6850 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6851 return END_BUILTINS;
6853 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6854 init_const_call_expr_arg_iterator (t, &iter);
6855 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6857 /* If a function doesn't take a variable number of arguments,
6858 the last element in the list will have type `void'. */
6859 parmtype = TREE_VALUE (parmlist);
6860 if (VOID_TYPE_P (parmtype))
6862 if (more_const_call_expr_args_p (&iter))
6863 return END_BUILTINS;
6864 return DECL_FUNCTION_CODE (fndecl);
6867 if (! more_const_call_expr_args_p (&iter))
6868 return END_BUILTINS;
6870 arg = next_const_call_expr_arg (&iter);
6871 argtype = TREE_TYPE (arg);
6873 if (SCALAR_FLOAT_TYPE_P (parmtype))
6875 if (! SCALAR_FLOAT_TYPE_P (argtype))
6876 return END_BUILTINS;
6878 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6880 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6881 return END_BUILTINS;
6883 else if (POINTER_TYPE_P (parmtype))
6885 if (! POINTER_TYPE_P (argtype))
6886 return END_BUILTINS;
6888 else if (INTEGRAL_TYPE_P (parmtype))
6890 if (! INTEGRAL_TYPE_P (argtype))
6891 return END_BUILTINS;
6893 else
6894 return END_BUILTINS;
6897 /* Variable-length argument list. */
6898 return DECL_FUNCTION_CODE (fndecl);
6901 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6902 evaluate to a constant. */
6904 static tree
6905 fold_builtin_constant_p (tree arg)
6907 /* We return 1 for a numeric type that's known to be a constant
6908 value at compile-time or for an aggregate type that's a
6909 literal constant. */
6910 STRIP_NOPS (arg);
6912 /* If we know this is a constant, return one. */
6913 if (CONSTANT_CLASS_P (arg)
6914 || (TREE_CODE (arg) == CONSTRUCTOR
6915 && TREE_CONSTANT (arg)))
6916 return integer_one_node;
6917 if (TREE_CODE (arg) == ADDR_EXPR)
6919 tree op = TREE_OPERAND (arg, 0);
6920 if (TREE_CODE (op) == STRING_CST
6921 || (TREE_CODE (op) == ARRAY_REF
6922 && integer_zerop (TREE_OPERAND (op, 1))
6923 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6924 return integer_one_node;
6927 /* If this expression has side effects, show we don't know it to be a
6928 constant. Likewise if it's a pointer or aggregate type, since in
6929 those cases we only want literals, as those are only optimized
6930 when generating RTL, not later.
6931 And finally, if we are compiling an initializer, not code, we
6932 need to return a definite result now; there's not going to be any
6933 more optimization done. */
6934 if (TREE_SIDE_EFFECTS (arg)
6935 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6936 || POINTER_TYPE_P (TREE_TYPE (arg))
6937 || cfun == 0
6938 || folding_initializer
6939 || force_folding_builtin_constant_p)
6940 return integer_zero_node;
6942 return NULL_TREE;
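/* Editor's note: illustrative results (not GCC source) of the folding
   above, for ordinary variables x and y:

     __builtin_constant_p (42)      -> 1
     __builtin_constant_p ("abc")   -> 1
     __builtin_constant_p (x + y)   -> 0 when a definite answer is required
                                       (e.g. in an initializer), otherwise
                                       deferred for later folding
*/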
6945 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6946 return it as a truthvalue. */
6948 static tree
6949 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
6950 tree predictor)
6952 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6954 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6955 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6956 ret_type = TREE_TYPE (TREE_TYPE (fn));
6957 pred_type = TREE_VALUE (arg_types);
6958 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6960 pred = fold_convert_loc (loc, pred_type, pred);
6961 expected = fold_convert_loc (loc, expected_type, expected);
6962 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
6963 predictor);
6965 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6966 build_int_cst (ret_type, 0));
6969 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
6970 NULL_TREE if no simplification is possible. */
6972 tree
6973 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
6975 tree inner, fndecl, inner_arg0;
6976 enum tree_code code;
6978 /* Distribute the expected value over short-circuiting operators.
6979 See through the cast from truthvalue_type_node to long. */
6980 inner_arg0 = arg0;
6981 while (TREE_CODE (inner_arg0) == NOP_EXPR
6982 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6983 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6984 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6986 /* If this is a builtin_expect within a builtin_expect keep the
6987 inner one. See through a comparison against a constant. It
6988 might have been added to create a truthvalue.
6989 inner = inner_arg0;
6991 if (COMPARISON_CLASS_P (inner)
6992 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6993 inner = TREE_OPERAND (inner, 0);
6995 if (TREE_CODE (inner) == CALL_EXPR
6996 && (fndecl = get_callee_fndecl (inner))
6997 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6998 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6999 return arg0;
7001 inner = inner_arg0;
7002 code = TREE_CODE (inner);
7003 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7005 tree op0 = TREE_OPERAND (inner, 0);
7006 tree op1 = TREE_OPERAND (inner, 1);
7008 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7009 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7010 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7012 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7015 /* If the argument isn't invariant then there's nothing else we can do. */
7016 if (!TREE_CONSTANT (inner_arg0))
7017 return NULL_TREE;
7019 /* If we expect that a comparison against the argument will fold to
7020 a constant, return the constant. In practice, this means a true
7021 constant or the address of a non-weak symbol. */
7022 inner = inner_arg0;
7023 STRIP_NOPS (inner);
7024 if (TREE_CODE (inner) == ADDR_EXPR)
7028 inner = TREE_OPERAND (inner, 0);
7030 while (TREE_CODE (inner) == COMPONENT_REF
7031 || TREE_CODE (inner) == ARRAY_REF);
7032 if ((TREE_CODE (inner) == VAR_DECL
7033 || TREE_CODE (inner) == FUNCTION_DECL)
7034 && DECL_WEAK (inner))
7035 return NULL_TREE;
7038 /* Otherwise, ARG0 already has the proper type for the return value. */
7039 return arg0;
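/* Editor's note: an illustrative example (not GCC source) of the
   distribution step above, which pushes the expectation into both
   operands of a short-circuit operator:

     __builtin_expect (a && b, 1)
       -> __builtin_expect (a, 1) && __builtin_expect (b, 1)

   so each branch condition carries its own prediction. */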
7042 /* Fold a call to __builtin_classify_type with argument ARG. */
7044 static tree
7045 fold_builtin_classify_type (tree arg)
7047 if (arg == 0)
7048 return build_int_cst (integer_type_node, no_type_class);
7050 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7053 /* Fold a call to __builtin_strlen with argument ARG. */
7055 static tree
7056 fold_builtin_strlen (location_t loc, tree type, tree arg)
7058 if (!validate_arg (arg, POINTER_TYPE))
7059 return NULL_TREE;
7060 else
7062 tree len = c_strlen (arg, 0);
7064 if (len)
7065 return fold_convert_loc (loc, type, len);
7067 return NULL_TREE;
7071 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7073 static tree
7074 fold_builtin_inf (location_t loc, tree type, int warn)
7076 REAL_VALUE_TYPE real;
7078 /* __builtin_inff is intended to be usable to define INFINITY on all
7079 targets. If an infinity is not available, INFINITY expands "to a
7080 positive constant of type float that overflows at translation
7081 time", footnote "In this case, using INFINITY will violate the
7082 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7083 Thus we pedwarn to ensure this constraint violation is
7084 diagnosed. */
7085 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7086 pedwarn (loc, 0, "target format does not support infinity");
7088 real_inf (&real);
7089 return build_real (type, real);
7092 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7094 static tree
7095 fold_builtin_nan (tree arg, tree type, int quiet)
7097 REAL_VALUE_TYPE real;
7098 const char *str;
7100 if (!validate_arg (arg, POINTER_TYPE))
7101 return NULL_TREE;
7102 str = c_getstr (arg);
7103 if (!str)
7104 return NULL_TREE;
7106 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7107 return NULL_TREE;
7109 return build_real (type, real);
7112 /* Return true if the floating point expression T has an integer value.
7113 We also allow +Inf, -Inf and NaN to be considered integer values. */
7115 static bool
7116 integer_valued_real_p (tree t)
7118 switch (TREE_CODE (t))
7120 case FLOAT_EXPR:
7121 return true;
7123 case ABS_EXPR:
7124 case SAVE_EXPR:
7125 return integer_valued_real_p (TREE_OPERAND (t, 0));
7127 case COMPOUND_EXPR:
7128 case MODIFY_EXPR:
7129 case BIND_EXPR:
7130 return integer_valued_real_p (TREE_OPERAND (t, 1));
7132 case PLUS_EXPR:
7133 case MINUS_EXPR:
7134 case MULT_EXPR:
7135 case MIN_EXPR:
7136 case MAX_EXPR:
7137 return integer_valued_real_p (TREE_OPERAND (t, 0))
7138 && integer_valued_real_p (TREE_OPERAND (t, 1));
7140 case COND_EXPR:
7141 return integer_valued_real_p (TREE_OPERAND (t, 1))
7142 && integer_valued_real_p (TREE_OPERAND (t, 2));
7144 case REAL_CST:
7145 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7147 case NOP_EXPR:
7149 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7150 if (TREE_CODE (type) == INTEGER_TYPE)
7151 return true;
7152 if (TREE_CODE (type) == REAL_TYPE)
7153 return integer_valued_real_p (TREE_OPERAND (t, 0));
7154 break;
7157 case CALL_EXPR:
7158 switch (builtin_mathfn_code (t))
7160 CASE_FLT_FN (BUILT_IN_CEIL):
7161 CASE_FLT_FN (BUILT_IN_FLOOR):
7162 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7163 CASE_FLT_FN (BUILT_IN_RINT):
7164 CASE_FLT_FN (BUILT_IN_ROUND):
7165 CASE_FLT_FN (BUILT_IN_TRUNC):
7166 return true;
7168 CASE_FLT_FN (BUILT_IN_FMIN):
7169 CASE_FLT_FN (BUILT_IN_FMAX):
7170 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7171 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7173 default:
7174 break;
7176 break;
7178 default:
7179 break;
7181 return false;
7184 /* FNDECL is assumed to be a builtin where truncation can be propagated
7185 across (for instance floor((double)f) == (double)floorf (f)).
7186 Do the transformation for a call with argument ARG. */
7188 static tree
7189 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7191 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7193 if (!validate_arg (arg, REAL_TYPE))
7194 return NULL_TREE;
7196 /* Integer rounding functions are idempotent. */
7197 if (fcode == builtin_mathfn_code (arg))
7198 return arg;
7200 /* If argument is already integer valued, and we don't need to worry
7201 about setting errno, there's no need to perform rounding. */
7202 if (! flag_errno_math && integer_valued_real_p (arg))
7203 return arg;
7205 if (optimize)
7207 tree arg0 = strip_float_extensions (arg);
7208 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7209 tree newtype = TREE_TYPE (arg0);
7210 tree decl;
7212 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7213 && (decl = mathfn_built_in (newtype, fcode)))
7214 return fold_convert_loc (loc, ftype,
7215 build_call_expr_loc (loc, decl, 1,
7216 fold_convert_loc (loc,
7217 newtype,
7218 arg0)));
7220 return NULL_TREE;
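#if 0
/* Illustrative sketch, not part of GCC: user-level view of the
   narrowing fold above.  The function name is hypothetical; compile
   with optimization to observe the transformation.  */
#include <math.h>

static double
example_trunc_transparent (float f)
{
  /* GCC folds this to (double) floorf (f), avoiding the wider call.  */
  return floor ((double) f);
}
#endif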
7223 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7224 the argument, for instance lround((double)f) -> lroundf (f).
7225 Do the transformation for a call with argument ARG. */
7227 static tree
7228 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7230 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7232 if (!validate_arg (arg, REAL_TYPE))
7233 return NULL_TREE;
7235 /* If argument is already integer valued, and we don't need to worry
7236 about setting errno, there's no need to perform rounding. */
7237 if (! flag_errno_math && integer_valued_real_p (arg))
7238 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7239 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7241 if (optimize)
7243 tree ftype = TREE_TYPE (arg);
7244 tree arg0 = strip_float_extensions (arg);
7245 tree newtype = TREE_TYPE (arg0);
7246 tree decl;
7248 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7249 && (decl = mathfn_built_in (newtype, fcode)))
7250 return build_call_expr_loc (loc, decl, 1,
7251 fold_convert_loc (loc, newtype, arg0));
7254 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7255 sizeof (int) == sizeof (long). */
7256 if (TYPE_PRECISION (integer_type_node)
7257 == TYPE_PRECISION (long_integer_type_node))
7259 tree newfn = NULL_TREE;
7260 switch (fcode)
7262 CASE_FLT_FN (BUILT_IN_ICEIL):
7263 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7264 break;
7266 CASE_FLT_FN (BUILT_IN_IFLOOR):
7267 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7268 break;
7270 CASE_FLT_FN (BUILT_IN_IROUND):
7271 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7272 break;
7274 CASE_FLT_FN (BUILT_IN_IRINT):
7275 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7276 break;
7278 default:
7279 break;
7282 if (newfn)
7284 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7285 return fold_convert_loc (loc,
7286 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7290 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7291 sizeof (long long) == sizeof (long). */
7292 if (TYPE_PRECISION (long_long_integer_type_node)
7293 == TYPE_PRECISION (long_integer_type_node))
7295 tree newfn = NULL_TREE;
7296 switch (fcode)
7298 CASE_FLT_FN (BUILT_IN_LLCEIL):
7299 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7300 break;
7302 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7303 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7304 break;
7306 CASE_FLT_FN (BUILT_IN_LLROUND):
7307 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7308 break;
7310 CASE_FLT_FN (BUILT_IN_LLRINT):
7311 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7312 break;
7314 default:
7315 break;
7318 if (newfn)
7320 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7321 return fold_convert_loc (loc,
7322 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7326 return NULL_TREE;
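#if 0
/* Illustrative sketch, not part of GCC: on an LP64 target where long
   and long long have the same precision, the canonicalization above
   rewrites llround into lround.  The function name is hypothetical.  */
#include <math.h>

static long long
example_llround_canon (double x)
{
  /* Folded to (long long) lround (x) when the precisions match.  */
  return llround (x);
}
#endif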
7329 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7330 return type. Return NULL_TREE if no simplification can be made. */
7332 static tree
7333 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7335 tree res;
7337 if (!validate_arg (arg, COMPLEX_TYPE)
7338 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7339 return NULL_TREE;
7341 /* Calculate the result when the argument is a constant. */
7342 if (TREE_CODE (arg) == COMPLEX_CST
7343 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7344 type, mpfr_hypot)))
7345 return res;
7347 if (TREE_CODE (arg) == COMPLEX_EXPR)
7349 tree real = TREE_OPERAND (arg, 0);
7350 tree imag = TREE_OPERAND (arg, 1);
7352 /* If either part is zero, cabs is fabs of the other. */
7353 if (real_zerop (real))
7354 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7355 if (real_zerop (imag))
7356 return fold_build1_loc (loc, ABS_EXPR, type, real);
7358 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7359 if (flag_unsafe_math_optimizations
7360 && operand_equal_p (real, imag, OEP_PURE_SAME))
7362 const REAL_VALUE_TYPE sqrt2_trunc
7363 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7364 STRIP_NOPS (real);
7365 return fold_build2_loc (loc, MULT_EXPR, type,
7366 fold_build1_loc (loc, ABS_EXPR, type, real),
7367 build_real (type, sqrt2_trunc));
7371 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7372 if (TREE_CODE (arg) == NEGATE_EXPR
7373 || TREE_CODE (arg) == CONJ_EXPR)
7374 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7376 /* Don't do this when optimizing for size. */
7377 if (flag_unsafe_math_optimizations
7378 && optimize && optimize_function_for_speed_p (cfun))
7380 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7382 if (sqrtfn != NULL_TREE)
7384 tree rpart, ipart, result;
7386 arg = builtin_save_expr (arg);
7388 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7389 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7391 rpart = builtin_save_expr (rpart);
7392 ipart = builtin_save_expr (ipart);
7394 result = fold_build2_loc (loc, PLUS_EXPR, type,
7395 fold_build2_loc (loc, MULT_EXPR, type,
7396 rpart, rpart),
7397 fold_build2_loc (loc, MULT_EXPR, type,
7398 ipart, ipart));
7400 return build_call_expr_loc (loc, sqrtfn, 1, result);
7404 return NULL_TREE;
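#if 0
/* Illustrative sketch, not part of GCC: user-level view of the cabs
   folds above.  The general expansion needs -funsafe-math-optimizations
   and speed optimization; the function name is hypothetical.  */
#include <complex.h>
#include <math.h>

static double
example_cabs_folds (double x, double y)
{
  /* cabs (x + 0.0*I) folds to fabs (x); with unsafe math enabled, a
     general cabs (x + y*I) expands to sqrt (x*x + y*y).  */
  return cabs (x + y * I);
}
#endif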
7407 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7408 complex tree type of the result. If NEG is true, the imaginary
7409 zero is negative. */
7411 static tree
7412 build_complex_cproj (tree type, bool neg)
7414 REAL_VALUE_TYPE rinf, rzero = dconst0;
7416 real_inf (&rinf);
7417 rzero.sign = neg;
7418 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7419 build_real (TREE_TYPE (type), rzero));
7422 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7423 return type. Return NULL_TREE if no simplification can be made. */
7425 static tree
7426 fold_builtin_cproj (location_t loc, tree arg, tree type)
7428 if (!validate_arg (arg, COMPLEX_TYPE)
7429 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7430 return NULL_TREE;
7432 /* If there are no infinities, return arg. */
7433 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7434 return non_lvalue_loc (loc, arg);
7436 /* Calculate the result when the argument is a constant. */
7437 if (TREE_CODE (arg) == COMPLEX_CST)
7439 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7440 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7442 if (real_isinf (real) || real_isinf (imag))
7443 return build_complex_cproj (type, imag->sign);
7444 else
7445 return arg;
7447 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7449 tree real = TREE_OPERAND (arg, 0);
7450 tree imag = TREE_OPERAND (arg, 1);
7452 STRIP_NOPS (real);
7453 STRIP_NOPS (imag);
7455 /* If the real part is inf and the imag part is known to be
7456 nonnegative, return (inf + 0i). Remember side-effects are
7457 possible in the imag part. */
7458 if (TREE_CODE (real) == REAL_CST
7459 && real_isinf (TREE_REAL_CST_PTR (real))
7460 && tree_expr_nonnegative_p (imag))
7461 return omit_one_operand_loc (loc, type,
7462 build_complex_cproj (type, false),
7463 arg);
7465 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7466 Remember side-effects are possible in the real part. */
7467 if (TREE_CODE (imag) == REAL_CST
7468 && real_isinf (TREE_REAL_CST_PTR (imag)))
7469 return
7470 omit_one_operand_loc (loc, type,
7471 build_complex_cproj (type, TREE_REAL_CST_PTR
7472 (imag)->sign), arg);
7475 return NULL_TREE;
7478 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7479 Return NULL_TREE if no simplification can be made. */
7481 static tree
7482 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7485 enum built_in_function fcode;
7486 tree res;
7488 if (!validate_arg (arg, REAL_TYPE))
7489 return NULL_TREE;
7491 /* Calculate the result when the argument is a constant. */
7492 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7493 return res;
7495 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7496 fcode = builtin_mathfn_code (arg);
7497 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7499 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7500 arg = fold_build2_loc (loc, MULT_EXPR, type,
7501 CALL_EXPR_ARG (arg, 0),
7502 build_real (type, dconsthalf));
7503 return build_call_expr_loc (loc, expfn, 1, arg);
7506 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7507 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7509 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7511 if (powfn)
7513 tree arg0 = CALL_EXPR_ARG (arg, 0);
7514 tree tree_root;
7515 /* The inner root was either sqrt or cbrt. */
7516 /* This was a conditional expression but it triggered a bug
7517 in Sun C 5.5. */
7518 REAL_VALUE_TYPE dconstroot;
7519 if (BUILTIN_SQRT_P (fcode))
7520 dconstroot = dconsthalf;
7521 else
7522 dconstroot = dconst_third ();
7524 /* Adjust for the outer root. */
7525 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7526 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7527 tree_root = build_real (type, dconstroot);
7528 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7532 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7533 if (flag_unsafe_math_optimizations
7534 && (fcode == BUILT_IN_POW
7535 || fcode == BUILT_IN_POWF
7536 || fcode == BUILT_IN_POWL))
7538 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7539 tree arg0 = CALL_EXPR_ARG (arg, 0);
7540 tree arg1 = CALL_EXPR_ARG (arg, 1);
7541 tree narg1;
7542 if (!tree_expr_nonnegative_p (arg0))
7543 arg0 = build1 (ABS_EXPR, type, arg0);
7544 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7545 build_real (type, dconsthalf));
7546 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7549 return NULL_TREE;
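#if 0
/* Illustrative sketch, not part of GCC: the sqrt folds above, all
   guarded by -funsafe-math-optimizations.  The function name is
   hypothetical.  */
#include <math.h>

static double
example_sqrt_folds (double x, double y)
{
  /* sqrt (exp (x))    -> exp (x * 0.5)
     sqrt (pow (x, y)) -> pow (fabs (x), y * 0.5)  */
  return sqrt (pow (x, y));
}
#endif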
7552 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7553 Return NULL_TREE if no simplification can be made. */
7555 static tree
7556 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7558 const enum built_in_function fcode = builtin_mathfn_code (arg);
7559 tree res;
7561 if (!validate_arg (arg, REAL_TYPE))
7562 return NULL_TREE;
7564 /* Calculate the result when the argument is a constant. */
7565 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7566 return res;
7568 if (flag_unsafe_math_optimizations)
7570 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7571 if (BUILTIN_EXPONENT_P (fcode))
7573 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7574 const REAL_VALUE_TYPE third_trunc =
7575 real_value_truncate (TYPE_MODE (type), dconst_third ());
7576 arg = fold_build2_loc (loc, MULT_EXPR, type,
7577 CALL_EXPR_ARG (arg, 0),
7578 build_real (type, third_trunc));
7579 return build_call_expr_loc (loc, expfn, 1, arg);
7582 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7583 if (BUILTIN_SQRT_P (fcode))
7585 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7587 if (powfn)
7589 tree arg0 = CALL_EXPR_ARG (arg, 0);
7590 tree tree_root;
7591 REAL_VALUE_TYPE dconstroot = dconst_third ();
7593 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7594 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7595 tree_root = build_real (type, dconstroot);
7596 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7600 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7601 if (BUILTIN_CBRT_P (fcode))
7603 tree arg0 = CALL_EXPR_ARG (arg, 0);
7604 if (tree_expr_nonnegative_p (arg0))
7606 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7608 if (powfn)
7610 tree tree_root;
7611 REAL_VALUE_TYPE dconstroot;
7613 real_arithmetic (&dconstroot, MULT_EXPR,
7614 dconst_third_ptr (), dconst_third_ptr ());
7615 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7616 tree_root = build_real (type, dconstroot);
7617 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7622 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7623 if (fcode == BUILT_IN_POW
7624 || fcode == BUILT_IN_POWF
7625 || fcode == BUILT_IN_POWL)
7627 tree arg00 = CALL_EXPR_ARG (arg, 0);
7628 tree arg01 = CALL_EXPR_ARG (arg, 1);
7629 if (tree_expr_nonnegative_p (arg00))
7631 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7632 const REAL_VALUE_TYPE dconstroot
7633 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7634 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7635 build_real (type, dconstroot));
7636 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7640 return NULL_TREE;
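/* Illustrative worked example (added commentary, not part of GCC):
   composing the root folds above, cbrt (sqrt (x)) becomes
   pow (x, 1.0/6.0) since (x^(1/2))^(1/3) = x^(1/6), and
   cbrt (cbrt (x)) becomes pow (x, 1.0/9.0) when x is known to be
   nonnegative.  */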
7643 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7644 TYPE is the type of the return value. Return NULL_TREE if no
7645 simplification can be made. */
7647 static tree
7648 fold_builtin_cos (location_t loc,
7649 tree arg, tree type, tree fndecl)
7651 tree res, narg;
7653 if (!validate_arg (arg, REAL_TYPE))
7654 return NULL_TREE;
7656 /* Calculate the result when the argument is a constant. */
7657 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7658 return res;
7660 /* Optimize cos(-x) into cos (x). */
7661 if ((narg = fold_strip_sign_ops (arg)))
7662 return build_call_expr_loc (loc, fndecl, 1, narg);
7664 return NULL_TREE;
7667 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7668 Return NULL_TREE if no simplification can be made. */
7670 static tree
7671 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7673 if (validate_arg (arg, REAL_TYPE))
7675 tree res, narg;
7677 /* Calculate the result when the argument is a constant. */
7678 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7679 return res;
7681 /* Optimize cosh(-x) into cosh (x). */
7682 if ((narg = fold_strip_sign_ops (arg)))
7683 return build_call_expr_loc (loc, fndecl, 1, narg);
7686 return NULL_TREE;
7689 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7690 argument ARG. TYPE is the type of the return value. Return
7691 NULL_TREE if no simplification can be made. */
7693 static tree
7694 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7695 bool hyper)
7697 if (validate_arg (arg, COMPLEX_TYPE)
7698 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7700 tree tmp;
7702 /* Calculate the result when the argument is a constant. */
7703 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7704 return tmp;
7706 /* Optimize fn(-x) into fn(x). */
7707 if ((tmp = fold_strip_sign_ops (arg)))
7708 return build_call_expr_loc (loc, fndecl, 1, tmp);
7711 return NULL_TREE;
7714 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7715 Return NULL_TREE if no simplification can be made. */
7717 static tree
7718 fold_builtin_tan (tree arg, tree type)
7720 enum built_in_function fcode;
7721 tree res;
7723 if (!validate_arg (arg, REAL_TYPE))
7724 return NULL_TREE;
7726 /* Calculate the result when the argument is a constant. */
7727 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7728 return res;
7730 /* Optimize tan(atan(x)) = x. */
7731 fcode = builtin_mathfn_code (arg);
7732 if (flag_unsafe_math_optimizations
7733 && (fcode == BUILT_IN_ATAN
7734 || fcode == BUILT_IN_ATANF
7735 || fcode == BUILT_IN_ATANL))
7736 return CALL_EXPR_ARG (arg, 0);
7738 return NULL_TREE;
7741 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7742 NULL_TREE if no simplification can be made. */
7744 static tree
7745 fold_builtin_sincos (location_t loc,
7746 tree arg0, tree arg1, tree arg2)
7748 tree type;
7749 tree res, fn, call;
7751 if (!validate_arg (arg0, REAL_TYPE)
7752 || !validate_arg (arg1, POINTER_TYPE)
7753 || !validate_arg (arg2, POINTER_TYPE))
7754 return NULL_TREE;
7756 type = TREE_TYPE (arg0);
7758 /* Calculate the result when the argument is a constant. */
7759 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7760 return res;
7762 /* Canonicalize sincos to cexpi. */
7763 if (!targetm.libc_has_function (function_c99_math_complex))
7764 return NULL_TREE;
7765 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7766 if (!fn)
7767 return NULL_TREE;
7769 call = build_call_expr_loc (loc, fn, 1, arg0);
7770 call = builtin_save_expr (call);
7772 return build2 (COMPOUND_EXPR, void_type_node,
7773 build2 (MODIFY_EXPR, void_type_node,
7774 build_fold_indirect_ref_loc (loc, arg1),
7775 build1 (IMAGPART_EXPR, type, call)),
7776 build2 (MODIFY_EXPR, void_type_node,
7777 build_fold_indirect_ref_loc (loc, arg2),
7778 build1 (REALPART_EXPR, type, call)));
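/* Illustrative sketch (added commentary, not part of GCC): when the
   C99 complex math library is available, the canonicalization above
   turns

       sincos (x, &s, &c);

   into the equivalent of

       __complex__ double t = cexpi (x);
       s = __imag__ t;
       c = __real__ t;

   so later passes can share duplicated sin/cos computations of X.  */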
7781 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7782 NULL_TREE if no simplification can be made. */
7784 static tree
7785 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7787 tree rtype;
7788 tree realp, imagp, ifn;
7789 tree res;
7791 if (!validate_arg (arg0, COMPLEX_TYPE)
7792 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7793 return NULL_TREE;
7795 /* Calculate the result when the argument is a constant. */
7796 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7797 return res;
7799 rtype = TREE_TYPE (TREE_TYPE (arg0));
7801 /* If we can figure out that the real part of arg0 is constant zero,
7802 fold to cexpi. */
7803 if (!targetm.libc_has_function (function_c99_math_complex))
7804 return NULL_TREE;
7805 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7806 if (!ifn)
7807 return NULL_TREE;
7809 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7810 && real_zerop (realp))
7812 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7813 return build_call_expr_loc (loc, ifn, 1, narg);
7816 /* If we can easily decompose the real and imaginary parts, split cexp
7817 into exp (r) * cexpi (i). */
7818 if (flag_unsafe_math_optimizations
7819 && realp)
7821 tree rfn, rcall, icall;
7823 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7824 if (!rfn)
7825 return NULL_TREE;
7827 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7828 if (!imagp)
7829 return NULL_TREE;
7831 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7832 icall = builtin_save_expr (icall);
7833 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7834 rcall = builtin_save_expr (rcall);
7835 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7836 fold_build2_loc (loc, MULT_EXPR, rtype,
7837 rcall,
7838 fold_build1_loc (loc, REALPART_EXPR,
7839 rtype, icall)),
7840 fold_build2_loc (loc, MULT_EXPR, rtype,
7841 rcall,
7842 fold_build1_loc (loc, IMAGPART_EXPR,
7843 rtype, icall)));
7846 return NULL_TREE;
7849 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7850 Return NULL_TREE if no simplification can be made. */
7852 static tree
7853 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7855 if (!validate_arg (arg, REAL_TYPE))
7856 return NULL_TREE;
7858 /* Optimize trunc of constant value. */
7859 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7861 REAL_VALUE_TYPE r, x;
7862 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7864 x = TREE_REAL_CST (arg);
7865 real_trunc (&r, TYPE_MODE (type), &x);
7866 return build_real (type, r);
7869 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7872 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7873 Return NULL_TREE if no simplification can be made. */
7875 static tree
7876 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7878 if (!validate_arg (arg, REAL_TYPE))
7879 return NULL_TREE;
7881 /* Optimize floor of constant value. */
7882 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7884 REAL_VALUE_TYPE x;
7886 x = TREE_REAL_CST (arg);
7887 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7889 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7890 REAL_VALUE_TYPE r;
7892 real_floor (&r, TYPE_MODE (type), &x);
7893 return build_real (type, r);
7897 /* Fold floor (x) where x is nonnegative to trunc (x). */
7898 if (tree_expr_nonnegative_p (arg))
7900 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7901 if (truncfn)
7902 return build_call_expr_loc (loc, truncfn, 1, arg);
7905 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7908 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7909 Return NULL_TREE if no simplification can be made. */
7911 static tree
7912 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7914 if (!validate_arg (arg, REAL_TYPE))
7915 return NULL_TREE;
7917 /* Optimize ceil of constant value. */
7918 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7920 REAL_VALUE_TYPE x;
7922 x = TREE_REAL_CST (arg);
7923 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7925 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7926 REAL_VALUE_TYPE r;
7928 real_ceil (&r, TYPE_MODE (type), &x);
7929 return build_real (type, r);
7933 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7936 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7937 Return NULL_TREE if no simplification can be made. */
7939 static tree
7940 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7942 if (!validate_arg (arg, REAL_TYPE))
7943 return NULL_TREE;
7945 /* Optimize round of constant value. */
7946 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7948 REAL_VALUE_TYPE x;
7950 x = TREE_REAL_CST (arg);
7951 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7953 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7954 REAL_VALUE_TYPE r;
7956 real_round (&r, TYPE_MODE (type), &x);
7957 return build_real (type, r);
7961 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7964 /* Fold function call to builtin lround, lroundf or lroundl (or the
7965 corresponding long long versions) and other rounding functions. ARG
7966 is the argument to the call. Return NULL_TREE if no simplification
7967 can be made. */
7969 static tree
7970 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7972 if (!validate_arg (arg, REAL_TYPE))
7973 return NULL_TREE;
7975 /* Optimize lround of constant value. */
7976 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7978 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7980 if (real_isfinite (&x))
7982 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7983 tree ftype = TREE_TYPE (arg);
7984 REAL_VALUE_TYPE r;
7985 bool fail = false;
7987 switch (DECL_FUNCTION_CODE (fndecl))
7989 CASE_FLT_FN (BUILT_IN_IFLOOR):
7990 CASE_FLT_FN (BUILT_IN_LFLOOR):
7991 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7992 real_floor (&r, TYPE_MODE (ftype), &x);
7993 break;
7995 CASE_FLT_FN (BUILT_IN_ICEIL):
7996 CASE_FLT_FN (BUILT_IN_LCEIL):
7997 CASE_FLT_FN (BUILT_IN_LLCEIL):
7998 real_ceil (&r, TYPE_MODE (ftype), &x);
7999 break;
8001 CASE_FLT_FN (BUILT_IN_IROUND):
8002 CASE_FLT_FN (BUILT_IN_LROUND):
8003 CASE_FLT_FN (BUILT_IN_LLROUND):
8004 real_round (&r, TYPE_MODE (ftype), &x);
8005 break;
8007 default:
8008 gcc_unreachable ();
8011 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8012 if (!fail)
8013 return wide_int_to_tree (itype, val);
8017 switch (DECL_FUNCTION_CODE (fndecl))
8019 CASE_FLT_FN (BUILT_IN_LFLOOR):
8020 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8021 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8022 if (tree_expr_nonnegative_p (arg))
8023 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8024 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8025 break;
8026 default:;
8029 return fold_fixed_mathfn (loc, fndecl, arg);
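/* Illustrative worked example (added commentary, not part of GCC):
   for a constant argument the rounding happens at compile time, e.g.
   lround (2.5) folds to the integer constant 3 (real_round rounds
   halfway cases away from zero), while lfloor (x) with x known to be
   nonnegative folds to a plain truncating conversion.  */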
8032 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8033 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8034 the argument to the call. Return NULL_TREE if no simplification can
8035 be made. */
8037 static tree
8038 fold_builtin_bitop (tree fndecl, tree arg)
8040 if (!validate_arg (arg, INTEGER_TYPE))
8041 return NULL_TREE;
8043 /* Optimize for constant argument. */
8044 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8046 tree type = TREE_TYPE (arg);
8047 int result;
8049 switch (DECL_FUNCTION_CODE (fndecl))
8051 CASE_INT_FN (BUILT_IN_FFS):
8052 result = wi::ffs (arg);
8053 break;
8055 CASE_INT_FN (BUILT_IN_CLZ):
8056 if (wi::ne_p (arg, 0))
8057 result = wi::clz (arg);
8058 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8059 result = TYPE_PRECISION (type);
8060 break;
8062 CASE_INT_FN (BUILT_IN_CTZ):
8063 if (wi::ne_p (arg, 0))
8064 result = wi::ctz (arg);
8065 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8066 result = TYPE_PRECISION (type);
8067 break;
8069 CASE_INT_FN (BUILT_IN_CLRSB):
8070 result = wi::clrsb (arg);
8071 break;
8073 CASE_INT_FN (BUILT_IN_POPCOUNT):
8074 result = wi::popcount (arg);
8075 break;
8077 CASE_INT_FN (BUILT_IN_PARITY):
8078 result = wi::parity (arg);
8079 break;
8081 default:
8082 gcc_unreachable ();
8085 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8088 return NULL_TREE;
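#if 0
/* Illustrative sketch, not part of GCC: constant arguments to the bit
   builtins fold at compile time via the wide-int helpers above.  The
   values shown assume a 32-bit int.  */
static int
example_bitop_folds (void)
{
  return __builtin_popcount (0xff)   /* folds to 8 */
       + __builtin_ctz (0x10)        /* folds to 4 */
       + __builtin_clz (1);          /* folds to 31 with 32-bit int */
}
#endif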
8091 /* Fold function call to builtin_bswap and the short, long and long long
8092 variants. Return NULL_TREE if no simplification can be made. */
8093 static tree
8094 fold_builtin_bswap (tree fndecl, tree arg)
8096 if (! validate_arg (arg, INTEGER_TYPE))
8097 return NULL_TREE;
8099 /* Optimize constant value. */
8100 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8102 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8104 switch (DECL_FUNCTION_CODE (fndecl))
8106 case BUILT_IN_BSWAP16:
8107 case BUILT_IN_BSWAP32:
8108 case BUILT_IN_BSWAP64:
8110 signop sgn = TYPE_SIGN (type);
8111 tree result =
8112 wide_int_to_tree (type,
8113 wide_int::from (arg, TYPE_PRECISION (type),
8114 sgn).bswap ());
8115 return result;
8117 default:
8118 gcc_unreachable ();
8122 return NULL_TREE;
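/* Illustrative worked example (added commentary, not part of GCC):
   __builtin_bswap32 (0x12345678) folds to the constant 0x78563412;
   the wide_int::from (...).bswap () call above performs the byte
   reversal at the builtin's precision.  */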
8125 /* A subroutine of fold_builtin to fold the various logarithmic
8126 functions. Return NULL_TREE if no simplification can be made.
8127 FUNC is the corresponding MPFR logarithm function. */
8129 static tree
8130 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8131 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8133 if (validate_arg (arg, REAL_TYPE))
8135 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8136 tree res;
8137 const enum built_in_function fcode = builtin_mathfn_code (arg);
8139 /* Calculate the result when the argument is a constant. */
8140 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8141 return res;
8143 /* Special case, optimize logN(expN(x)) = x. */
8144 if (flag_unsafe_math_optimizations
8145 && ((func == mpfr_log
8146 && (fcode == BUILT_IN_EXP
8147 || fcode == BUILT_IN_EXPF
8148 || fcode == BUILT_IN_EXPL))
8149 || (func == mpfr_log2
8150 && (fcode == BUILT_IN_EXP2
8151 || fcode == BUILT_IN_EXP2F
8152 || fcode == BUILT_IN_EXP2L))
8153 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8154 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8156 /* Optimize logN(func()) for various exponential functions. We
8157 want to determine the value "x" and the power "exponent" in
8158 order to transform logN(x**exponent) into exponent*logN(x). */
8159 if (flag_unsafe_math_optimizations)
8161 tree exponent = 0, x = 0;
8163 switch (fcode)
8165 CASE_FLT_FN (BUILT_IN_EXP):
8166 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8167 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8168 dconst_e ()));
8169 exponent = CALL_EXPR_ARG (arg, 0);
8170 break;
8171 CASE_FLT_FN (BUILT_IN_EXP2):
8172 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8173 x = build_real (type, dconst2);
8174 exponent = CALL_EXPR_ARG (arg, 0);
8175 break;
8176 CASE_FLT_FN (BUILT_IN_EXP10):
8177 CASE_FLT_FN (BUILT_IN_POW10):
8178 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8180 REAL_VALUE_TYPE dconst10;
8181 real_from_integer (&dconst10, VOIDmode, 10, SIGNED);
8182 x = build_real (type, dconst10);
8184 exponent = CALL_EXPR_ARG (arg, 0);
8185 break;
8186 CASE_FLT_FN (BUILT_IN_SQRT):
8187 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8188 x = CALL_EXPR_ARG (arg, 0);
8189 exponent = build_real (type, dconsthalf);
8190 break;
8191 CASE_FLT_FN (BUILT_IN_CBRT):
8192 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8193 x = CALL_EXPR_ARG (arg, 0);
8194 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8195 dconst_third ()));
8196 break;
8197 CASE_FLT_FN (BUILT_IN_POW):
8198 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8199 x = CALL_EXPR_ARG (arg, 0);
8200 exponent = CALL_EXPR_ARG (arg, 1);
8201 break;
8202 default:
8203 break;
8206 /* Now perform the optimization. */
8207 if (x && exponent)
8209 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8210 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8215 return NULL_TREE;
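#if 0
/* Illustrative sketch, not part of GCC: the logN(x**exponent) folds
   above, all guarded by -funsafe-math-optimizations.  The function
   name is hypothetical.  */
#include <math.h>

static double
example_log_folds (double x, double y)
{
  /* log (exp (x))    -> x
     log (sqrt (x))   -> 0.5 * log (x)
     log (pow (x, y)) -> y * log (x)  */
  return log (pow (x, y));
}
#endif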
8218 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8219 NULL_TREE if no simplification can be made. */
8221 static tree
8222 fold_builtin_hypot (location_t loc, tree fndecl,
8223 tree arg0, tree arg1, tree type)
8225 tree res, narg0, narg1;
8227 if (!validate_arg (arg0, REAL_TYPE)
8228 || !validate_arg (arg1, REAL_TYPE))
8229 return NULL_TREE;
8231 /* Calculate the result when the argument is a constant. */
8232 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8233 return res;
8235 /* If either argument to hypot has a negate or abs, strip that off.
8236 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8237 narg0 = fold_strip_sign_ops (arg0);
8238 narg1 = fold_strip_sign_ops (arg1);
8239 if (narg0 || narg1)
8241 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8242 narg1 ? narg1 : arg1);
8245 /* If either argument is zero, hypot is fabs of the other. */
8246 if (real_zerop (arg0))
8247 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8248 else if (real_zerop (arg1))
8249 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8251 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8252 if (flag_unsafe_math_optimizations
8253 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8255 const REAL_VALUE_TYPE sqrt2_trunc
8256 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8257 return fold_build2_loc (loc, MULT_EXPR, type,
8258 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8259 build_real (type, sqrt2_trunc));
8262 return NULL_TREE;
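/* Illustrative worked example (added commentary, not part of GCC):
   hypot (x, 0.0) folds to fabs (x), hypot (-x, y) to hypot (x, y),
   and with -funsafe-math-optimizations hypot (x, x) becomes
   fabs (x) multiplied by a compile-time sqrt(2) constant.  */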
8266 /* Fold a builtin function call to pow, powf, or powl. Return
8267 NULL_TREE if no simplification can be made. */
8268 static tree
8269 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8271 tree res;
8273 if (!validate_arg (arg0, REAL_TYPE)
8274 || !validate_arg (arg1, REAL_TYPE))
8275 return NULL_TREE;
8277 /* Calculate the result when the argument is a constant. */
8278 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8279 return res;
8281 /* Optimize pow(1.0,y) = 1.0. */
8282 if (real_onep (arg0))
8283 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8285 if (TREE_CODE (arg1) == REAL_CST
8286 && !TREE_OVERFLOW (arg1))
8288 REAL_VALUE_TYPE cint;
8289 REAL_VALUE_TYPE c;
8290 HOST_WIDE_INT n;
8292 c = TREE_REAL_CST (arg1);
8294 /* Optimize pow(x,0.0) = 1.0. */
8295 if (REAL_VALUES_EQUAL (c, dconst0))
8296 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8297 arg0);
8299 /* Optimize pow(x,1.0) = x. */
8300 if (REAL_VALUES_EQUAL (c, dconst1))
8301 return arg0;
8303 /* Optimize pow(x,-1.0) = 1.0/x. */
8304 if (REAL_VALUES_EQUAL (c, dconstm1))
8305 return fold_build2_loc (loc, RDIV_EXPR, type,
8306 build_real (type, dconst1), arg0);
8308 /* Optimize pow(x,0.5) = sqrt(x). */
8309 if (flag_unsafe_math_optimizations
8310 && REAL_VALUES_EQUAL (c, dconsthalf))
8312 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8314 if (sqrtfn != NULL_TREE)
8315 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8318 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8319 if (flag_unsafe_math_optimizations)
8321 const REAL_VALUE_TYPE dconstroot
8322 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8324 if (REAL_VALUES_EQUAL (c, dconstroot))
8326 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8327 if (cbrtfn != NULL_TREE)
8328 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8332 /* Check for an integer exponent. */
8333 n = real_to_integer (&c);
8334 real_from_integer (&cint, VOIDmode, n, SIGNED);
8335 if (real_identical (&c, &cint))
8337 /* Attempt to evaluate pow at compile-time, unless this should
8338 raise an exception. */
8339 if (TREE_CODE (arg0) == REAL_CST
8340 && !TREE_OVERFLOW (arg0)
8341 && (n > 0
8342 || (!flag_trapping_math && !flag_errno_math)
8343 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8345 REAL_VALUE_TYPE x;
8346 bool inexact;
8348 x = TREE_REAL_CST (arg0);
8349 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8350 if (flag_unsafe_math_optimizations || !inexact)
8351 return build_real (type, x);
8354 /* Strip sign ops from even integer powers. */
8355 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8357 tree narg0 = fold_strip_sign_ops (arg0);
8358 if (narg0)
8359 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8364 if (flag_unsafe_math_optimizations)
8366 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8368 /* Optimize pow(expN(x),y) = expN(x*y). */
8369 if (BUILTIN_EXPONENT_P (fcode))
8371 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8372 tree arg = CALL_EXPR_ARG (arg0, 0);
8373 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8374 return build_call_expr_loc (loc, expfn, 1, arg);
8377 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8378 if (BUILTIN_SQRT_P (fcode))
8380 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8381 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8382 build_real (type, dconsthalf));
8383 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8386 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8387 if (BUILTIN_CBRT_P (fcode))
8389 tree arg = CALL_EXPR_ARG (arg0, 0);
8390 if (tree_expr_nonnegative_p (arg))
8392 const REAL_VALUE_TYPE dconstroot
8393 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8394 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8395 build_real (type, dconstroot));
8396 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8400 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8401 if (fcode == BUILT_IN_POW
8402 || fcode == BUILT_IN_POWF
8403 || fcode == BUILT_IN_POWL)
8405 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8406 if (tree_expr_nonnegative_p (arg00))
8408 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8409 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8410 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8415 return NULL_TREE;
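#if 0
/* Illustrative sketch, not part of GCC: a few of the pow folds above.
   The sqrt rewrite needs -funsafe-math-optimizations; the function
   name is hypothetical.  */
#include <math.h>

static double
example_pow_folds (double x)
{
  /* pow (x, 1.0)  -> x          (always)
     pow (x, -1.0) -> 1.0 / x    (always)
     pow (x, 0.5)  -> sqrt (x)   (unsafe math only)
     pow (2.0, 10.0) folds to the constant 1024.0.  */
  return pow (x, 0.5);
}
#endif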
8418 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8419 Return NULL_TREE if no simplification can be made. */
8420 static tree
8421 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8422 tree arg0, tree arg1, tree type)
8424 if (!validate_arg (arg0, REAL_TYPE)
8425 || !validate_arg (arg1, INTEGER_TYPE))
8426 return NULL_TREE;
8428 /* Optimize pow(1.0,y) = 1.0. */
8429 if (real_onep (arg0))
8430 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8432 if (tree_fits_shwi_p (arg1))
8434 HOST_WIDE_INT c = tree_to_shwi (arg1);
8436 /* Evaluate powi at compile-time. */
8437 if (TREE_CODE (arg0) == REAL_CST
8438 && !TREE_OVERFLOW (arg0))
8440 REAL_VALUE_TYPE x;
8441 x = TREE_REAL_CST (arg0);
8442 real_powi (&x, TYPE_MODE (type), &x, c);
8443 return build_real (type, x);
8446 /* Optimize pow(x,0) = 1.0. */
8447 if (c == 0)
8448 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8449 arg0);
8451 /* Optimize pow(x,1) = x. */
8452 if (c == 1)
8453 return arg0;
8455 /* Optimize pow(x,-1) = 1.0/x. */
8456 if (c == -1)
8457 return fold_build2_loc (loc, RDIV_EXPR, type,
8458 build_real (type, dconst1), arg0);
8461 return NULL_TREE;
8464 /* A subroutine of fold_builtin to fold the various exponent
8465 functions. Return NULL_TREE if no simplification can be made.
8466 FUNC is the corresponding MPFR exponent function. */
8468 static tree
8469 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8470 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8472 if (validate_arg (arg, REAL_TYPE))
8474 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8475 tree res;
8477 /* Calculate the result when the argument is a constant. */
8478 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8479 return res;
8481 /* Optimize expN(logN(x)) = x. */
8482 if (flag_unsafe_math_optimizations)
8484 const enum built_in_function fcode = builtin_mathfn_code (arg);
8486 if ((func == mpfr_exp
8487 && (fcode == BUILT_IN_LOG
8488 || fcode == BUILT_IN_LOGF
8489 || fcode == BUILT_IN_LOGL))
8490 || (func == mpfr_exp2
8491 && (fcode == BUILT_IN_LOG2
8492 || fcode == BUILT_IN_LOG2F
8493 || fcode == BUILT_IN_LOG2L))
8494 || (func == mpfr_exp10
8495 && (fcode == BUILT_IN_LOG10
8496 || fcode == BUILT_IN_LOG10F
8497 || fcode == BUILT_IN_LOG10L)))
8498 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8502 return NULL_TREE;
8505 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8506 Return NULL_TREE if no simplification can be made. */
8508 static tree
8509 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8511 tree fn, len, lenp1, call, type;
8513 if (!validate_arg (dest, POINTER_TYPE)
8514 || !validate_arg (src, POINTER_TYPE))
8515 return NULL_TREE;
8517 len = c_strlen (src, 1);
8518 if (!len
8519 || TREE_CODE (len) != INTEGER_CST)
8520 return NULL_TREE;
8522 if (optimize_function_for_size_p (cfun)
8523 /* If length is zero it's small enough. */
8524 && !integer_zerop (len))
8525 return NULL_TREE;
8527 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8528 if (!fn)
8529 return NULL_TREE;
8531 lenp1 = size_binop_loc (loc, PLUS_EXPR,
8532 fold_convert_loc (loc, size_type_node, len),
8533 build_int_cst (size_type_node, 1));
8534 /* We use dest twice in building our expression. Save it from
8535 multiple expansions. */
8536 dest = builtin_save_expr (dest);
8537 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8539 type = TREE_TYPE (TREE_TYPE (fndecl));
8540 dest = fold_build_pointer_plus_loc (loc, dest, len);
8541 dest = fold_convert_loc (loc, type, dest);
8542 dest = omit_one_operand_loc (loc, type, dest, call);
8543 return dest;
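/* Illustrative worked example (added commentary, not part of GCC):
   when the source length is a known constant, e.g.

       stpcpy (d, "abc")

   the fold above emits memcpy (d, "abc", 4) and yields d + 3 as the
   result, reusing the saved DEST for both the copy and the returned
   pointer.  */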
8546 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8547 arguments to the call, and TYPE is its return type.
8548 Return NULL_TREE if no simplification can be made. */
8550 static tree
8551 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8553 if (!validate_arg (arg1, POINTER_TYPE)
8554 || !validate_arg (arg2, INTEGER_TYPE)
8555 || !validate_arg (len, INTEGER_TYPE))
8556 return NULL_TREE;
8557 else
8559 const char *p1;
8561 if (TREE_CODE (arg2) != INTEGER_CST
8562 || !tree_fits_uhwi_p (len))
8563 return NULL_TREE;
8565 p1 = c_getstr (arg1);
8566 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8568 char c;
8569 const char *r;
8570 tree tem;
8572 if (target_char_cast (arg2, &c))
8573 return NULL_TREE;
8575 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8577 if (r == NULL)
8578 return build_int_cst (TREE_TYPE (arg1), 0);
8580 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8581 return fold_convert_loc (loc, type, tem);
8583 return NULL_TREE;
8587 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8588 Return NULL_TREE if no simplification can be made. */
8590 static tree
8591 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8593 const char *p1, *p2;
8595 if (!validate_arg (arg1, POINTER_TYPE)
8596 || !validate_arg (arg2, POINTER_TYPE)
8597 || !validate_arg (len, INTEGER_TYPE))
8598 return NULL_TREE;
8600 /* If the LEN parameter is zero, return zero. */
8601 if (integer_zerop (len))
8602 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8603 arg1, arg2);
8605 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8606 if (operand_equal_p (arg1, arg2, 0))
8607 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8609 p1 = c_getstr (arg1);
8610 p2 = c_getstr (arg2);
8612 /* If all arguments are constant, and the value of len is not greater
8613 than the lengths of arg1 and arg2, evaluate at compile-time. */
8614 if (tree_fits_uhwi_p (len) && p1 && p2
8615 && compare_tree_int (len, strlen (p1) + 1) <= 0
8616 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8618 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8620 if (r > 0)
8621 return integer_one_node;
8622 else if (r < 0)
8623 return integer_minus_one_node;
8624 else
8625 return integer_zero_node;
8628 /* If the LEN parameter is one, return an expression corresponding to
8629 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8630 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8632 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8633 tree cst_uchar_ptr_node
8634 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8636 tree ind1
8637 = fold_convert_loc (loc, integer_type_node,
8638 build1 (INDIRECT_REF, cst_uchar_node,
8639 fold_convert_loc (loc,
8640 cst_uchar_ptr_node,
8641 arg1)));
8642 tree ind2
8643 = fold_convert_loc (loc, integer_type_node,
8644 build1 (INDIRECT_REF, cst_uchar_node,
8645 fold_convert_loc (loc,
8646 cst_uchar_ptr_node,
8647 arg2)));
8648 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8651 return NULL_TREE;
8654 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8655 Return NULL_TREE if no simplification can be made. */
8657 static tree
8658 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8660 const char *p1, *p2;
8662 if (!validate_arg (arg1, POINTER_TYPE)
8663 || !validate_arg (arg2, POINTER_TYPE))
8664 return NULL_TREE;
8666 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8667 if (operand_equal_p (arg1, arg2, 0))
8668 return integer_zero_node;
8670 p1 = c_getstr (arg1);
8671 p2 = c_getstr (arg2);
8673 if (p1 && p2)
8675 const int i = strcmp (p1, p2);
8676 if (i < 0)
8677 return integer_minus_one_node;
8678 else if (i > 0)
8679 return integer_one_node;
8680 else
8681 return integer_zero_node;
8684 /* If the second arg is "", return *(const unsigned char*)arg1. */
8685 if (p2 && *p2 == '\0')
8687 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8688 tree cst_uchar_ptr_node
8689 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8691 return fold_convert_loc (loc, integer_type_node,
8692 build1 (INDIRECT_REF, cst_uchar_node,
8693 fold_convert_loc (loc,
8694 cst_uchar_ptr_node,
8695 arg1)));
8698 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8699 if (p1 && *p1 == '\0')
8701 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8702 tree cst_uchar_ptr_node
8703 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8705 tree temp
8706 = fold_convert_loc (loc, integer_type_node,
8707 build1 (INDIRECT_REF, cst_uchar_node,
8708 fold_convert_loc (loc,
8709 cst_uchar_ptr_node,
8710 arg2)));
8711 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8714 return NULL_TREE;
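/* Illustrative worked example (added commentary, not part of GCC):
   strcmp (s, "") folds to *(const unsigned char *) s, and
   strcmp ("a", "b") folds to the constant -1, so no library call
   survives when both arguments are known strings.  */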
8717 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8718 Return NULL_TREE if no simplification can be made. */
8720 static tree
8721 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8723 const char *p1, *p2;
8725 if (!validate_arg (arg1, POINTER_TYPE)
8726 || !validate_arg (arg2, POINTER_TYPE)
8727 || !validate_arg (len, INTEGER_TYPE))
8728 return NULL_TREE;
8730 /* If the LEN parameter is zero, return zero. */
8731 if (integer_zerop (len))
8732 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8733 arg1, arg2);
8735 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8736 if (operand_equal_p (arg1, arg2, 0))
8737 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8739 p1 = c_getstr (arg1);
8740 p2 = c_getstr (arg2);
8742 if (tree_fits_uhwi_p (len) && p1 && p2)
8744 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8745 if (i > 0)
8746 return integer_one_node;
8747 else if (i < 0)
8748 return integer_minus_one_node;
8749 else
8750 return integer_zero_node;
8753 /* If the second arg is "", and the length is greater than zero,
8754 return *(const unsigned char*)arg1. */
8755 if (p2 && *p2 == '\0'
8756 && TREE_CODE (len) == INTEGER_CST
8757 && tree_int_cst_sgn (len) == 1)
8759 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8760 tree cst_uchar_ptr_node
8761 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8763 return fold_convert_loc (loc, integer_type_node,
8764 build1 (INDIRECT_REF, cst_uchar_node,
8765 fold_convert_loc (loc,
8766 cst_uchar_ptr_node,
8767 arg1)));
8770 /* If the first arg is "", and the length is greater than zero,
8771 return -*(const unsigned char*)arg2. */
8772 if (p1 && *p1 == '\0'
8773 && TREE_CODE (len) == INTEGER_CST
8774 && tree_int_cst_sgn (len) == 1)
8776 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8777 tree cst_uchar_ptr_node
8778 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8780 tree temp = fold_convert_loc (loc, integer_type_node,
8781 build1 (INDIRECT_REF, cst_uchar_node,
8782 fold_convert_loc (loc,
8783 cst_uchar_ptr_node,
8784 arg2)));
8785 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8788 /* If the LEN parameter is one, return an expression corresponding to
8789 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8790 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8792 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8793 tree cst_uchar_ptr_node
8794 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8796 tree ind1 = fold_convert_loc (loc, integer_type_node,
8797 build1 (INDIRECT_REF, cst_uchar_node,
8798 fold_convert_loc (loc,
8799 cst_uchar_ptr_node,
8800 arg1)));
8801 tree ind2 = fold_convert_loc (loc, integer_type_node,
8802 build1 (INDIRECT_REF, cst_uchar_node,
8803 fold_convert_loc (loc,
8804 cst_uchar_ptr_node,
8805 arg2)));
8806 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8809 return NULL_TREE;
8812 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8813 ARG. Return NULL_TREE if no simplification can be made. */
8815 static tree
8816 fold_builtin_signbit (location_t loc, tree arg, tree type)
8818 if (!validate_arg (arg, REAL_TYPE))
8819 return NULL_TREE;
8821 /* If ARG is a compile-time constant, determine the result. */
8822 if (TREE_CODE (arg) == REAL_CST
8823 && !TREE_OVERFLOW (arg))
8825 REAL_VALUE_TYPE c;
8827 c = TREE_REAL_CST (arg);
8828 return (REAL_VALUE_NEGATIVE (c)
8829 ? build_one_cst (type)
8830 : build_zero_cst (type));
8833 /* If ARG is non-negative, the result is always zero. */
8834 if (tree_expr_nonnegative_p (arg))
8835 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8837 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8838 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8839 return fold_convert (type,
8840 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8841 build_real (TREE_TYPE (arg), dconst0)));
8843 return NULL_TREE;
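/* Illustrative worked example (added commentary, not part of GCC):
   signbit (-2.5) folds to 1 at compile time, and when the target
   format has no signed zeros, signbit (x) lowers to the comparison
   x < 0.0.  */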
8846 /* Fold function call to builtin copysign, copysignf or copysignl with
8847 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8848 be made. */
8850 static tree
8851 fold_builtin_copysign (location_t loc, tree fndecl,
8852 tree arg1, tree arg2, tree type)
8854 tree tem;
8856 if (!validate_arg (arg1, REAL_TYPE)
8857 || !validate_arg (arg2, REAL_TYPE))
8858 return NULL_TREE;
8860 /* copysign(X,X) is X. */
8861 if (operand_equal_p (arg1, arg2, 0))
8862 return fold_convert_loc (loc, type, arg1);
8864 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8865 if (TREE_CODE (arg1) == REAL_CST
8866 && TREE_CODE (arg2) == REAL_CST
8867 && !TREE_OVERFLOW (arg1)
8868 && !TREE_OVERFLOW (arg2))
8870 REAL_VALUE_TYPE c1, c2;
8872 c1 = TREE_REAL_CST (arg1);
8873 c2 = TREE_REAL_CST (arg2);
8874 /* c1.sign := c2.sign. */
8875 real_copysign (&c1, &c2);
8876 return build_real (type, c1);
8879 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8880 Remember to evaluate Y for side-effects. */
8881 if (tree_expr_nonnegative_p (arg2))
8882 return omit_one_operand_loc (loc, type,
8883 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8884 arg2);
8886 /* Strip sign changing operations for the first argument. */
8887 tem = fold_strip_sign_ops (arg1);
8888 if (tem)
8889 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8891 return NULL_TREE;
8894 /* Fold a call to builtin isascii with argument ARG. */
8896 static tree
8897 fold_builtin_isascii (location_t loc, tree arg)
8899 if (!validate_arg (arg, INTEGER_TYPE))
8900 return NULL_TREE;
8901 else
8903 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8904 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8905 build_int_cst (integer_type_node,
8906 ~ (unsigned HOST_WIDE_INT) 0x7f));
8907 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8908 arg, integer_zero_node);
8912 /* Fold a call to builtin toascii with argument ARG. */
8914 static tree
8915 fold_builtin_toascii (location_t loc, tree arg)
8917 if (!validate_arg (arg, INTEGER_TYPE))
8918 return NULL_TREE;
8920 /* Transform toascii(c) -> (c & 0x7f). */
8921 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8922 build_int_cst (integer_type_node, 0x7f));
8925 /* Fold a call to builtin isdigit with argument ARG. */
8927 static tree
8928 fold_builtin_isdigit (location_t loc, tree arg)
8930 if (!validate_arg (arg, INTEGER_TYPE))
8931 return NULL_TREE;
8932 else
8934 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8935 /* According to the C standard, isdigit is unaffected by locale.
8936 However, it definitely is affected by the target character set. */
8937 unsigned HOST_WIDE_INT target_digit0
8938 = lang_hooks.to_target_charset ('0');
8940 if (target_digit0 == 0)
8941 return NULL_TREE;
8943 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8944 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8945 build_int_cst (unsigned_type_node, target_digit0));
8946 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8947 build_int_cst (unsigned_type_node, 9));
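/* Illustrative worked example (added commentary, not part of GCC):
   the unsigned-subtract trick above merges the two range checks of
   isdigit (c) into one:

       c >= '0' && c <= '9'   becomes   (unsigned) c - '0' <= 9

   since values below '0' wrap around to large unsigned numbers.  */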
8951 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8953 static tree
8954 fold_builtin_fabs (location_t loc, tree arg, tree type)
8956 if (!validate_arg (arg, REAL_TYPE))
8957 return NULL_TREE;
8959 arg = fold_convert_loc (loc, type, arg);
8960 if (TREE_CODE (arg) == REAL_CST)
8961 return fold_abs_const (arg, type);
8962 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8965 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8967 static tree
8968 fold_builtin_abs (location_t loc, tree arg, tree type)
8970 if (!validate_arg (arg, INTEGER_TYPE))
8971 return NULL_TREE;
8973 arg = fold_convert_loc (loc, type, arg);
8974 if (TREE_CODE (arg) == INTEGER_CST)
8975 return fold_abs_const (arg, type);
8976 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8979 /* Fold a fma operation with arguments ARG[012]. */
8981 tree
8982 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8983 tree type, tree arg0, tree arg1, tree arg2)
8985 if (TREE_CODE (arg0) == REAL_CST
8986 && TREE_CODE (arg1) == REAL_CST
8987 && TREE_CODE (arg2) == REAL_CST)
8988 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8990 return NULL_TREE;
8993 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8995 static tree
8996 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8998 if (validate_arg (arg0, REAL_TYPE)
8999 && validate_arg (arg1, REAL_TYPE)
9000 && validate_arg (arg2, REAL_TYPE))
9002 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9003 if (tem)
9004 return tem;
9006 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9007 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9008 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9010 return NULL_TREE;
9013 /* Fold a call to builtin fmin or fmax. */
9015 static tree
9016 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9017 tree type, bool max)
9019 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9021 /* Calculate the result when the argument is a constant. */
9022 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9024 if (res)
9025 return res;
9027 /* If either argument is NaN, return the other one. Avoid the
9028 transformation if we get (and honor) a signalling NaN. Using
9029 omit_one_operand() ensures we create a non-lvalue. */
9030 if (TREE_CODE (arg0) == REAL_CST
9031 && real_isnan (&TREE_REAL_CST (arg0))
9032 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9033 || ! TREE_REAL_CST (arg0).signalling))
9034 return omit_one_operand_loc (loc, type, arg1, arg0);
9035 if (TREE_CODE (arg1) == REAL_CST
9036 && real_isnan (&TREE_REAL_CST (arg1))
9037 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9038 || ! TREE_REAL_CST (arg1).signalling))
9039 return omit_one_operand_loc (loc, type, arg0, arg1);
9041 /* Transform fmin/fmax(x,x) -> x. */
9042 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9043 return omit_one_operand_loc (loc, type, arg0, arg1);
9045 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9046 functions to return the numeric arg if the other one is NaN.
9047 These tree codes don't honor that, so only transform if
9048 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9049 handled, so we don't have to worry about it either. */
9050 if (flag_finite_math_only)
9051 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9052 fold_convert_loc (loc, type, arg0),
9053 fold_convert_loc (loc, type, arg1));
9055 return NULL_TREE;
9058 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9060 static tree
9061 fold_builtin_carg (location_t loc, tree arg, tree type)
9063 if (validate_arg (arg, COMPLEX_TYPE)
9064 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9066 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9068 if (atan2_fn)
9070 tree new_arg = builtin_save_expr (arg);
9071 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9072 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9073 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9077 return NULL_TREE;
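/* Editorial sketch (not part of the original source): the rewrite
   above turns carg into a call to the corresponding atan2 variant.  */
#if 0
#include <complex.h>
#include <math.h>

double
phase (double _Complex z)
{
  /* Rewritten to atan2 (cimag (z), creal (z)).  */
  return carg (z);
}
#endif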
9080 /* Fold a call to builtin logb/ilogb. */
9082 static tree
9083 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9085 if (! validate_arg (arg, REAL_TYPE))
9086 return NULL_TREE;
9088 STRIP_NOPS (arg);
9090 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9092 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9094 switch (value->cl)
9096 case rvc_nan:
9097 case rvc_inf:
9098 /* If arg is Inf or NaN and we're logb, return it. */
9099 if (TREE_CODE (rettype) == REAL_TYPE)
9101 /* For logb(-Inf) we have to return +Inf. */
9102 if (real_isinf (value) && real_isneg (value))
9104 REAL_VALUE_TYPE tem;
9105 real_inf (&tem);
9106 return build_real (rettype, tem);
9108 return fold_convert_loc (loc, rettype, arg);
9110 /* Fall through... */
9111 case rvc_zero:
9112 /* Zero may set errno and/or raise an exception for logb, also
9113 for ilogb we don't know FP_ILOGB0. */
9114 return NULL_TREE;
9115 case rvc_normal:
9116 /* For normal numbers, proceed iff radix == 2. In GCC,
9117 normalized significands are in the range [0.5, 1.0). We
9118 want the exponent as if they were [1.0, 2.0) so get the
9119 exponent and subtract 1. */
9120 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9121 return fold_convert_loc (loc, rettype,
9122 build_int_cst (integer_type_node,
9123 REAL_EXP (value)-1));
9124 break;
9128 return NULL_TREE;
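/* Editorial sketch (not part of the original source): for a radix-2
   constant the exponent is read straight out of the representation.  */
#if 0
#include <math.h>

double
exp_of_eight (void)
{
  /* 8.0 is 0.5 * 2**4 internally, so REAL_EXP - 1 gives 3; the call
     folds to the constant 3.0.  */
  return logb (8.0);
}
#endif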
9131 /* Fold a call to builtin significand, if radix == 2. */
9133 static tree
9134 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9136 if (! validate_arg (arg, REAL_TYPE))
9137 return NULL_TREE;
9139 STRIP_NOPS (arg);
9141 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9143 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9145 switch (value->cl)
9147 case rvc_zero:
9148 case rvc_nan:
9149 case rvc_inf:
9150 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9151 return fold_convert_loc (loc, rettype, arg);
9152 case rvc_normal:
9153 /* For normal numbers, proceed iff radix == 2. */
9154 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9156 REAL_VALUE_TYPE result = *value;
9157 /* In GCC, normalized significands are in the range [0.5,
9158 1.0). We want them to be [1.0, 2.0) so set the
9159 exponent to 1. */
9160 SET_REAL_EXP (&result, 1);
9161 return build_real (rettype, result);
9163 break;
9167 return NULL_TREE;
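/* Editorial sketch (not part of the original source, and assuming
   glibc's significand() extension is declared): resetting the
   exponent to 1 rescales the constant into [1.0, 2.0).  */
#if 0
#define _GNU_SOURCE
#include <math.h>

double
mant (void)
{
  return significand (12.0);   /* 12.0 == 1.5 * 2**3; folds to 1.5.  */
}
#endif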
9170 /* Fold a call to builtin frexp, we can assume the base is 2. */
9172 static tree
9173 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9175 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9176 return NULL_TREE;
9178 STRIP_NOPS (arg0);
9180 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9181 return NULL_TREE;
9183 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9185 /* Proceed if a valid pointer type was passed in. */
9186 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9188 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9189 tree frac, exp;
9191 switch (value->cl)
9193 case rvc_zero:
9194 /* For +-0, return (*exp = 0, +-0). */
9195 exp = integer_zero_node;
9196 frac = arg0;
9197 break;
9198 case rvc_nan:
9199 case rvc_inf:
9200 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9201 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9202 case rvc_normal:
9204 /* Since the frexp function always expects base 2, and in
9205 GCC normalized significands are already in the range
9206 [0.5, 1.0), we have exactly what frexp wants. */
9207 REAL_VALUE_TYPE frac_rvt = *value;
9208 SET_REAL_EXP (&frac_rvt, 0);
9209 frac = build_real (rettype, frac_rvt);
9210 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9212 break;
9213 default:
9214 gcc_unreachable ();
9217 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9218 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9219 TREE_SIDE_EFFECTS (arg1) = 1;
9220 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9223 return NULL_TREE;
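/* Editorial sketch (not part of the original source): GCC's internal
   [0.5, 1.0) normalization is exactly frexp's convention, so the
   constant case is a matter of copying out the fields.  */
#if 0
#include <math.h>

double
split (int *e)
{
  return frexp (8.0, e);   /* folds to (*e = 4, 0.5).  */
}
#endif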
9226 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9227 then we can assume the base is two. If it's false, then we have to
9228 check the mode of the TYPE parameter in certain cases. */
9230 static tree
9231 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9232 tree type, bool ldexp)
9234 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9236 STRIP_NOPS (arg0);
9237 STRIP_NOPS (arg1);
9239 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9240 if (real_zerop (arg0) || integer_zerop (arg1)
9241 || (TREE_CODE (arg0) == REAL_CST
9242 && !real_isfinite (&TREE_REAL_CST (arg0))))
9243 return omit_one_operand_loc (loc, type, arg0, arg1);
9245 /* If both arguments are constant, then try to evaluate it. */
9246 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9247 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9248 && tree_fits_shwi_p (arg1))
9250 /* Bound the maximum adjustment to twice the range of the
9251 mode's valid exponents. Use abs to ensure the range is
9252 positive as a sanity check. */
9253 const long max_exp_adj = 2 *
9254 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9255 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9257 /* Get the user-requested adjustment. */
9258 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9260 /* The requested adjustment must be inside this range. This
9261 is a preliminary cap to avoid things like overflow, we
9262 may still fail to compute the result for other reasons. */
9263 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9265 REAL_VALUE_TYPE initial_result;
9267 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9269 /* Ensure we didn't overflow. */
9270 if (! real_isinf (&initial_result))
9272 const REAL_VALUE_TYPE trunc_result
9273 = real_value_truncate (TYPE_MODE (type), initial_result);
9275 /* Only proceed if the target mode can hold the
9276 resulting value. */
9277 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9278 return build_real (type, trunc_result);
9284 return NULL_TREE;
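/* Editorial sketch (not part of the original source): a constant
   ldexp with an in-range adjustment folds via real_ldexp, provided
   the result neither overflows nor changes when truncated to the
   target mode.  */
#if 0
#include <math.h>

double
scaled (void)
{
  return ldexp (1.5, 3);   /* folds to the constant 12.0.  */
}
#endif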
9287 /* Fold a call to builtin modf. */
9289 static tree
9290 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9292 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9293 return NULL_TREE;
9295 STRIP_NOPS (arg0);
9297 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9298 return NULL_TREE;
9300 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9302 /* Proceed if a valid pointer type was passed in. */
9303 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9305 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9306 REAL_VALUE_TYPE trunc, frac;
9308 switch (value->cl)
9310 case rvc_nan:
9311 case rvc_zero:
9312 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9313 trunc = frac = *value;
9314 break;
9315 case rvc_inf:
9316 /* For +-Inf, return (*arg1 = arg0, +-0). */
9317 frac = dconst0;
9318 frac.sign = value->sign;
9319 trunc = *value;
9320 break;
9321 case rvc_normal:
9322 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9323 real_trunc (&trunc, VOIDmode, value);
9324 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9325 /* If the original number was negative and already
9326 integral, then the fractional part is -0.0. */
9327 if (value->sign && frac.cl == rvc_zero)
9328 frac.sign = value->sign;
9329 break;
9332 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9333 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9334 build_real (rettype, trunc));
9335 TREE_SIDE_EFFECTS (arg1) = 1;
9336 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9337 build_real (rettype, frac));
9340 return NULL_TREE;
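/* Editorial sketch (not part of the original source): the constant
   fold mirrors modf's contract, storing the truncated part through
   the pointer and returning the fraction.  */
#if 0
#include <math.h>

double
frac_part (double *ip)
{
  return modf (3.25, ip);   /* folds to (*ip = 3.0, 0.25).  */
}
#endif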
9343 /* Given a location LOC, an interclass builtin function decl FNDECL
9344 and its single argument ARG, return a folded expression computing
9345 the same, or NULL_TREE if we either couldn't or didn't want to fold
9346 (the latter happens if there's an RTL instruction available). */
9348 static tree
9349 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9351 enum machine_mode mode;
9353 if (!validate_arg (arg, REAL_TYPE))
9354 return NULL_TREE;
9356 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9357 return NULL_TREE;
9359 mode = TYPE_MODE (TREE_TYPE (arg));
9361 /* If there is no optab, try generic code. */
9362 switch (DECL_FUNCTION_CODE (fndecl))
9364 tree result;
9366 CASE_FLT_FN (BUILT_IN_ISINF):
9368 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9369 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9370 tree const type = TREE_TYPE (arg);
9371 REAL_VALUE_TYPE r;
9372 char buf[128];
9374 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9375 real_from_string (&r, buf);
9376 result = build_call_expr (isgr_fn, 2,
9377 fold_build1_loc (loc, ABS_EXPR, type, arg),
9378 build_real (type, r));
9379 return result;
9381 CASE_FLT_FN (BUILT_IN_FINITE):
9382 case BUILT_IN_ISFINITE:
9384 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9385 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9386 tree const type = TREE_TYPE (arg);
9387 REAL_VALUE_TYPE r;
9388 char buf[128];
9390 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9391 real_from_string (&r, buf);
9392 result = build_call_expr (isle_fn, 2,
9393 fold_build1_loc (loc, ABS_EXPR, type, arg),
9394 build_real (type, r));
9395 /*result = fold_build2_loc (loc, UNGT_EXPR,
9396 TREE_TYPE (TREE_TYPE (fndecl)),
9397 fold_build1_loc (loc, ABS_EXPR, type, arg),
9398 build_real (type, r));
9399 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9400 TREE_TYPE (TREE_TYPE (fndecl)),
9401 result);*/
9402 return result;
9404 case BUILT_IN_ISNORMAL:
9406 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9407 islessequal(fabs(x),DBL_MAX). */
9408 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9409 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9410 tree const type = TREE_TYPE (arg);
9411 REAL_VALUE_TYPE rmax, rmin;
9412 char buf[128];
9414 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9415 real_from_string (&rmax, buf);
9416 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9417 real_from_string (&rmin, buf);
9418 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9419 result = build_call_expr (isle_fn, 2, arg,
9420 build_real (type, rmax));
9421 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9422 build_call_expr (isge_fn, 2, arg,
9423 build_real (type, rmin)));
9424 return result;
9426 default:
9427 break;
9430 return NULL_TREE;
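/* Editorial sketch (not part of the original source): a user-level
   rendering of the generic isnormal lowering above, for IEEE double
   where DBL_MIN is 0x1p-1022, i.e. 0x1p(emin - 1).  */
#if 0
#include <float.h>
#include <math.h>

int
normal_p (double x)
{
  double ax = fabs (x);
  /* BIT_AND_EXPR of the two quiet comparisons, as built above.  */
  return islessequal (ax, DBL_MAX) & isgreaterequal (ax, DBL_MIN);
}
#endif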
9433 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9434 ARG is the argument for the call. */
9436 static tree
9437 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9439 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9440 REAL_VALUE_TYPE r;
9442 if (!validate_arg (arg, REAL_TYPE))
9443 return NULL_TREE;
9445 switch (builtin_index)
9447 case BUILT_IN_ISINF:
9448 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9449 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9451 if (TREE_CODE (arg) == REAL_CST)
9453 r = TREE_REAL_CST (arg);
9454 if (real_isinf (&r))
9455 return real_compare (GT_EXPR, &r, &dconst0)
9456 ? integer_one_node : integer_minus_one_node;
9457 else
9458 return integer_zero_node;
9461 return NULL_TREE;
9463 case BUILT_IN_ISINF_SIGN:
9465 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9466 /* In a boolean context, GCC will fold the inner COND_EXPR to
9467 1. So e.g. "if (isinf_sign(x))" would be folded to just
9468 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9469 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9470 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9471 tree tmp = NULL_TREE;
9473 arg = builtin_save_expr (arg);
9475 if (signbit_fn && isinf_fn)
9477 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9478 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9480 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9481 signbit_call, integer_zero_node);
9482 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9483 isinf_call, integer_zero_node);
9485 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9486 integer_minus_one_node, integer_one_node);
9487 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9488 isinf_call, tmp,
9489 integer_zero_node);
9492 return tmp;
9495 case BUILT_IN_ISFINITE:
9496 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9497 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9498 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9500 if (TREE_CODE (arg) == REAL_CST)
9502 r = TREE_REAL_CST (arg);
9503 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9506 return NULL_TREE;
9508 case BUILT_IN_ISNAN:
9509 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9510 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9512 if (TREE_CODE (arg) == REAL_CST)
9514 r = TREE_REAL_CST (arg);
9515 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9518 arg = builtin_save_expr (arg);
9519 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9521 default:
9522 gcc_unreachable ();
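/* Editorial sketch (not part of the original source): the isinf_sign
   expansion above, written as the equivalent source-level ternary.  */
#if 0
#include <math.h>

int
inf_sign (double x)
{
  return isinf (x) ? (signbit (x) ? -1 : 1) : 0;
}
#endif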
9526 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9527 This builtin will generate code to return the appropriate floating
9528 point classification depending on the value of the floating point
9529 number passed in. The possible return values must be supplied as
9530 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9531 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9532 one floating point argument which is "type generic". */
9534 static tree
9535 fold_builtin_fpclassify (location_t loc, tree exp)
9537 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9538 arg, type, res, tmp;
9539 enum machine_mode mode;
9540 REAL_VALUE_TYPE r;
9541 char buf[128];
9543 /* Verify the required arguments in the original call. */
9544 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9545 INTEGER_TYPE, INTEGER_TYPE,
9546 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9547 return NULL_TREE;
9549 fp_nan = CALL_EXPR_ARG (exp, 0);
9550 fp_infinite = CALL_EXPR_ARG (exp, 1);
9551 fp_normal = CALL_EXPR_ARG (exp, 2);
9552 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9553 fp_zero = CALL_EXPR_ARG (exp, 4);
9554 arg = CALL_EXPR_ARG (exp, 5);
9555 type = TREE_TYPE (arg);
9556 mode = TYPE_MODE (type);
9557 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9559 /* fpclassify(x) ->
9560 isnan(x) ? FP_NAN :
9561 (fabs(x) == Inf ? FP_INFINITE :
9562 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9563 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9565 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9566 build_real (type, dconst0));
9567 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9568 tmp, fp_zero, fp_subnormal);
9570 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9571 real_from_string (&r, buf);
9572 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9573 arg, build_real (type, r));
9574 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9576 if (HONOR_INFINITIES (mode))
9578 real_inf (&r);
9579 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9580 build_real (type, r));
9581 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9582 fp_infinite, res);
9585 if (HONOR_NANS (mode))
9587 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9588 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9591 return res;
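/* Editorial sketch (not part of the original source): the nest built
   above, unfolded for IEEE double (0x1p-1022 is DBL_MIN).  All tests
   except the NaN check run on the saved fabs of the argument.  */
#if 0
#include <math.h>

int
classify (double x)
{
  double ax = __builtin_fabs (x);

  if (x != x)                   return FP_NAN;         /* unordered */
  if (ax == __builtin_inf ())   return FP_INFINITE;
  if (ax >= 0x1p-1022)          return FP_NORMAL;
  if (ax == 0.0)                return FP_ZERO;
  return FP_SUBNORMAL;
}
#endif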
9594 /* Fold a call to an unordered comparison function such as
9595 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9596 being called and ARG0 and ARG1 are the arguments for the call.
9597 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9598 the opposite of the desired result. UNORDERED_CODE is used
9599 for modes that can hold NaNs and ORDERED_CODE is used for
9600 the rest. */
9602 static tree
9603 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9604 enum tree_code unordered_code,
9605 enum tree_code ordered_code)
9607 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9608 enum tree_code code;
9609 tree type0, type1;
9610 enum tree_code code0, code1;
9611 tree cmp_type = NULL_TREE;
9613 type0 = TREE_TYPE (arg0);
9614 type1 = TREE_TYPE (arg1);
9616 code0 = TREE_CODE (type0);
9617 code1 = TREE_CODE (type1);
9619 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9620 /* Choose the wider of two real types. */
9621 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9622 ? type0 : type1;
9623 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9624 cmp_type = type0;
9625 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9626 cmp_type = type1;
9628 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9629 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9631 if (unordered_code == UNORDERED_EXPR)
9633 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9634 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9635 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9638 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9639 : ordered_code;
9640 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9641 fold_build2_loc (loc, code, type, arg0, arg1));
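/* Editorial sketch (not part of the original source): isgreater is
   folded to the negation of UNLE_EXPR, so it is true only for
   ordered operands with x > y and, unlike a raw x > y, raises no
   FE_INVALID on quiet NaNs.  */
#if 0
#include <math.h>

int
gt (double x, double y)
{
  return isgreater (x, y);   /* becomes !UNLE_EXPR (x, y).  */
}
#endif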
9644 /* Fold a call to built-in function FNDECL with 0 arguments.
9645 IGNORE is true if the result of the function call is ignored. This
9646 function returns NULL_TREE if no simplification was possible. */
9648 static tree
9649 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9651 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9652 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9653 switch (fcode)
9655 CASE_FLT_FN (BUILT_IN_INF):
9656 case BUILT_IN_INFD32:
9657 case BUILT_IN_INFD64:
9658 case BUILT_IN_INFD128:
9659 return fold_builtin_inf (loc, type, true);
9661 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9662 return fold_builtin_inf (loc, type, false);
9664 case BUILT_IN_CLASSIFY_TYPE:
9665 return fold_builtin_classify_type (NULL_TREE);
9667 case BUILT_IN_UNREACHABLE:
9668 if (flag_sanitize & SANITIZE_UNREACHABLE
9669 && (current_function_decl == NULL
9670 || !lookup_attribute ("no_sanitize_undefined",
9671 DECL_ATTRIBUTES (current_function_decl))))
9672 return ubsan_instrument_unreachable (loc);
9673 break;
9675 default:
9676 break;
9678 return NULL_TREE;
9681 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9682 IGNORE is true if the result of the function call is ignored. This
9683 function returns NULL_TREE if no simplification was possible. */
9685 static tree
9686 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9688 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9689 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9690 switch (fcode)
9692 case BUILT_IN_CONSTANT_P:
9694 tree val = fold_builtin_constant_p (arg0);
9696 /* Gimplification will pull the CALL_EXPR for the builtin out of
9697 an if condition. When not optimizing, we'll not CSE it back.
9698 To avoid regressions such as link errors, return false now. */
9699 if (!val && !optimize)
9700 val = integer_zero_node;
9702 return val;
9705 case BUILT_IN_CLASSIFY_TYPE:
9706 return fold_builtin_classify_type (arg0);
9708 case BUILT_IN_STRLEN:
9709 return fold_builtin_strlen (loc, type, arg0);
9711 CASE_FLT_FN (BUILT_IN_FABS):
9712 case BUILT_IN_FABSD32:
9713 case BUILT_IN_FABSD64:
9714 case BUILT_IN_FABSD128:
9715 return fold_builtin_fabs (loc, arg0, type);
9717 case BUILT_IN_ABS:
9718 case BUILT_IN_LABS:
9719 case BUILT_IN_LLABS:
9720 case BUILT_IN_IMAXABS:
9721 return fold_builtin_abs (loc, arg0, type);
9723 CASE_FLT_FN (BUILT_IN_CONJ):
9724 if (validate_arg (arg0, COMPLEX_TYPE)
9725 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9726 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9727 break;
9729 CASE_FLT_FN (BUILT_IN_CREAL):
9730 if (validate_arg (arg0, COMPLEX_TYPE)
9731 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9732 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9733 break;
9735 CASE_FLT_FN (BUILT_IN_CIMAG):
9736 if (validate_arg (arg0, COMPLEX_TYPE)
9737 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9738 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9739 break;
9741 CASE_FLT_FN (BUILT_IN_CCOS):
9742 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9744 CASE_FLT_FN (BUILT_IN_CCOSH):
9745 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9747 CASE_FLT_FN (BUILT_IN_CPROJ):
9748 return fold_builtin_cproj (loc, arg0, type);
9750 CASE_FLT_FN (BUILT_IN_CSIN):
9751 if (validate_arg (arg0, COMPLEX_TYPE)
9752 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9753 return do_mpc_arg1 (arg0, type, mpc_sin);
9754 break;
9756 CASE_FLT_FN (BUILT_IN_CSINH):
9757 if (validate_arg (arg0, COMPLEX_TYPE)
9758 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9759 return do_mpc_arg1 (arg0, type, mpc_sinh);
9760 break;
9762 CASE_FLT_FN (BUILT_IN_CTAN):
9763 if (validate_arg (arg0, COMPLEX_TYPE)
9764 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9765 return do_mpc_arg1 (arg0, type, mpc_tan);
9766 break;
9768 CASE_FLT_FN (BUILT_IN_CTANH):
9769 if (validate_arg (arg0, COMPLEX_TYPE)
9770 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9771 return do_mpc_arg1 (arg0, type, mpc_tanh);
9772 break;
9774 CASE_FLT_FN (BUILT_IN_CLOG):
9775 if (validate_arg (arg0, COMPLEX_TYPE)
9776 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9777 return do_mpc_arg1 (arg0, type, mpc_log);
9778 break;
9780 CASE_FLT_FN (BUILT_IN_CSQRT):
9781 if (validate_arg (arg0, COMPLEX_TYPE)
9782 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9783 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9784 break;
9786 CASE_FLT_FN (BUILT_IN_CASIN):
9787 if (validate_arg (arg0, COMPLEX_TYPE)
9788 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9789 return do_mpc_arg1 (arg0, type, mpc_asin);
9790 break;
9792 CASE_FLT_FN (BUILT_IN_CACOS):
9793 if (validate_arg (arg0, COMPLEX_TYPE)
9794 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9795 return do_mpc_arg1 (arg0, type, mpc_acos);
9796 break;
9798 CASE_FLT_FN (BUILT_IN_CATAN):
9799 if (validate_arg (arg0, COMPLEX_TYPE)
9800 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9801 return do_mpc_arg1 (arg0, type, mpc_atan);
9802 break;
9804 CASE_FLT_FN (BUILT_IN_CASINH):
9805 if (validate_arg (arg0, COMPLEX_TYPE)
9806 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9807 return do_mpc_arg1 (arg0, type, mpc_asinh);
9808 break;
9810 CASE_FLT_FN (BUILT_IN_CACOSH):
9811 if (validate_arg (arg0, COMPLEX_TYPE)
9812 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9813 return do_mpc_arg1 (arg0, type, mpc_acosh);
9814 break;
9816 CASE_FLT_FN (BUILT_IN_CATANH):
9817 if (validate_arg (arg0, COMPLEX_TYPE)
9818 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9819 return do_mpc_arg1 (arg0, type, mpc_atanh);
9820 break;
9822 CASE_FLT_FN (BUILT_IN_CABS):
9823 return fold_builtin_cabs (loc, arg0, type, fndecl);
9825 CASE_FLT_FN (BUILT_IN_CARG):
9826 return fold_builtin_carg (loc, arg0, type);
9828 CASE_FLT_FN (BUILT_IN_SQRT):
9829 return fold_builtin_sqrt (loc, arg0, type);
9831 CASE_FLT_FN (BUILT_IN_CBRT):
9832 return fold_builtin_cbrt (loc, arg0, type);
9834 CASE_FLT_FN (BUILT_IN_ASIN):
9835 if (validate_arg (arg0, REAL_TYPE))
9836 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9837 &dconstm1, &dconst1, true);
9838 break;
9840 CASE_FLT_FN (BUILT_IN_ACOS):
9841 if (validate_arg (arg0, REAL_TYPE))
9842 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9843 &dconstm1, &dconst1, true);
9844 break;
9846 CASE_FLT_FN (BUILT_IN_ATAN):
9847 if (validate_arg (arg0, REAL_TYPE))
9848 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9849 break;
9851 CASE_FLT_FN (BUILT_IN_ASINH):
9852 if (validate_arg (arg0, REAL_TYPE))
9853 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9854 break;
9856 CASE_FLT_FN (BUILT_IN_ACOSH):
9857 if (validate_arg (arg0, REAL_TYPE))
9858 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9859 &dconst1, NULL, true);
9860 break;
9862 CASE_FLT_FN (BUILT_IN_ATANH):
9863 if (validate_arg (arg0, REAL_TYPE))
9864 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9865 &dconstm1, &dconst1, false);
9866 break;
9868 CASE_FLT_FN (BUILT_IN_SIN):
9869 if (validate_arg (arg0, REAL_TYPE))
9870 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9871 break;
9873 CASE_FLT_FN (BUILT_IN_COS):
9874 return fold_builtin_cos (loc, arg0, type, fndecl);
9876 CASE_FLT_FN (BUILT_IN_TAN):
9877 return fold_builtin_tan (arg0, type);
9879 CASE_FLT_FN (BUILT_IN_CEXP):
9880 return fold_builtin_cexp (loc, arg0, type);
9882 CASE_FLT_FN (BUILT_IN_CEXPI):
9883 if (validate_arg (arg0, REAL_TYPE))
9884 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9885 break;
9887 CASE_FLT_FN (BUILT_IN_SINH):
9888 if (validate_arg (arg0, REAL_TYPE))
9889 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9890 break;
9892 CASE_FLT_FN (BUILT_IN_COSH):
9893 return fold_builtin_cosh (loc, arg0, type, fndecl);
9895 CASE_FLT_FN (BUILT_IN_TANH):
9896 if (validate_arg (arg0, REAL_TYPE))
9897 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9898 break;
9900 CASE_FLT_FN (BUILT_IN_ERF):
9901 if (validate_arg (arg0, REAL_TYPE))
9902 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9903 break;
9905 CASE_FLT_FN (BUILT_IN_ERFC):
9906 if (validate_arg (arg0, REAL_TYPE))
9907 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9908 break;
9910 CASE_FLT_FN (BUILT_IN_TGAMMA):
9911 if (validate_arg (arg0, REAL_TYPE))
9912 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9913 break;
9915 CASE_FLT_FN (BUILT_IN_EXP):
9916 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9918 CASE_FLT_FN (BUILT_IN_EXP2):
9919 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9921 CASE_FLT_FN (BUILT_IN_EXP10):
9922 CASE_FLT_FN (BUILT_IN_POW10):
9923 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9925 CASE_FLT_FN (BUILT_IN_EXPM1):
9926 if (validate_arg (arg0, REAL_TYPE))
9927 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9928 break;
9930 CASE_FLT_FN (BUILT_IN_LOG):
9931 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9933 CASE_FLT_FN (BUILT_IN_LOG2):
9934 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9936 CASE_FLT_FN (BUILT_IN_LOG10):
9937 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9939 CASE_FLT_FN (BUILT_IN_LOG1P):
9940 if (validate_arg (arg0, REAL_TYPE))
9941 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9942 &dconstm1, NULL, false);
9943 break;
9945 CASE_FLT_FN (BUILT_IN_J0):
9946 if (validate_arg (arg0, REAL_TYPE))
9947 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9948 NULL, NULL, 0);
9949 break;
9951 CASE_FLT_FN (BUILT_IN_J1):
9952 if (validate_arg (arg0, REAL_TYPE))
9953 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9954 NULL, NULL, 0);
9955 break;
9957 CASE_FLT_FN (BUILT_IN_Y0):
9958 if (validate_arg (arg0, REAL_TYPE))
9959 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9960 &dconst0, NULL, false);
9961 break;
9963 CASE_FLT_FN (BUILT_IN_Y1):
9964 if (validate_arg (arg0, REAL_TYPE))
9965 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9966 &dconst0, NULL, false);
9967 break;
9969 CASE_FLT_FN (BUILT_IN_NAN):
9970 case BUILT_IN_NAND32:
9971 case BUILT_IN_NAND64:
9972 case BUILT_IN_NAND128:
9973 return fold_builtin_nan (arg0, type, true);
9975 CASE_FLT_FN (BUILT_IN_NANS):
9976 return fold_builtin_nan (arg0, type, false);
9978 CASE_FLT_FN (BUILT_IN_FLOOR):
9979 return fold_builtin_floor (loc, fndecl, arg0);
9981 CASE_FLT_FN (BUILT_IN_CEIL):
9982 return fold_builtin_ceil (loc, fndecl, arg0);
9984 CASE_FLT_FN (BUILT_IN_TRUNC):
9985 return fold_builtin_trunc (loc, fndecl, arg0);
9987 CASE_FLT_FN (BUILT_IN_ROUND):
9988 return fold_builtin_round (loc, fndecl, arg0);
9990 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9991 CASE_FLT_FN (BUILT_IN_RINT):
9992 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9994 CASE_FLT_FN (BUILT_IN_ICEIL):
9995 CASE_FLT_FN (BUILT_IN_LCEIL):
9996 CASE_FLT_FN (BUILT_IN_LLCEIL):
9997 CASE_FLT_FN (BUILT_IN_LFLOOR):
9998 CASE_FLT_FN (BUILT_IN_IFLOOR):
9999 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10000 CASE_FLT_FN (BUILT_IN_IROUND):
10001 CASE_FLT_FN (BUILT_IN_LROUND):
10002 CASE_FLT_FN (BUILT_IN_LLROUND):
10003 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10005 CASE_FLT_FN (BUILT_IN_IRINT):
10006 CASE_FLT_FN (BUILT_IN_LRINT):
10007 CASE_FLT_FN (BUILT_IN_LLRINT):
10008 return fold_fixed_mathfn (loc, fndecl, arg0);
10010 case BUILT_IN_BSWAP16:
10011 case BUILT_IN_BSWAP32:
10012 case BUILT_IN_BSWAP64:
10013 return fold_builtin_bswap (fndecl, arg0);
10015 CASE_INT_FN (BUILT_IN_FFS):
10016 CASE_INT_FN (BUILT_IN_CLZ):
10017 CASE_INT_FN (BUILT_IN_CTZ):
10018 CASE_INT_FN (BUILT_IN_CLRSB):
10019 CASE_INT_FN (BUILT_IN_POPCOUNT):
10020 CASE_INT_FN (BUILT_IN_PARITY):
10021 return fold_builtin_bitop (fndecl, arg0);
10023 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10024 return fold_builtin_signbit (loc, arg0, type);
10026 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10027 return fold_builtin_significand (loc, arg0, type);
10029 CASE_FLT_FN (BUILT_IN_ILOGB):
10030 CASE_FLT_FN (BUILT_IN_LOGB):
10031 return fold_builtin_logb (loc, arg0, type);
10033 case BUILT_IN_ISASCII:
10034 return fold_builtin_isascii (loc, arg0);
10036 case BUILT_IN_TOASCII:
10037 return fold_builtin_toascii (loc, arg0);
10039 case BUILT_IN_ISDIGIT:
10040 return fold_builtin_isdigit (loc, arg0);
10042 CASE_FLT_FN (BUILT_IN_FINITE):
10043 case BUILT_IN_FINITED32:
10044 case BUILT_IN_FINITED64:
10045 case BUILT_IN_FINITED128:
10046 case BUILT_IN_ISFINITE:
10048 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10049 if (ret)
10050 return ret;
10051 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10054 CASE_FLT_FN (BUILT_IN_ISINF):
10055 case BUILT_IN_ISINFD32:
10056 case BUILT_IN_ISINFD64:
10057 case BUILT_IN_ISINFD128:
10059 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10060 if (ret)
10061 return ret;
10062 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10065 case BUILT_IN_ISNORMAL:
10066 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10068 case BUILT_IN_ISINF_SIGN:
10069 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10071 CASE_FLT_FN (BUILT_IN_ISNAN):
10072 case BUILT_IN_ISNAND32:
10073 case BUILT_IN_ISNAND64:
10074 case BUILT_IN_ISNAND128:
10075 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10077 case BUILT_IN_PRINTF:
10078 case BUILT_IN_PRINTF_UNLOCKED:
10079 case BUILT_IN_VPRINTF:
10080 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10082 case BUILT_IN_FREE:
10083 if (integer_zerop (arg0))
10084 return build_empty_stmt (loc);
10085 break;
10087 default:
10088 break;
10091 return NULL_TREE;
10095 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10096 IGNORE is true if the result of the function call is ignored. This
10097 function returns NULL_TREE if no simplification was possible. */
10099 static tree
10100 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10102 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10103 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10105 switch (fcode)
10107 CASE_FLT_FN (BUILT_IN_JN):
10108 if (validate_arg (arg0, INTEGER_TYPE)
10109 && validate_arg (arg1, REAL_TYPE))
10110 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10111 break;
10113 CASE_FLT_FN (BUILT_IN_YN):
10114 if (validate_arg (arg0, INTEGER_TYPE)
10115 && validate_arg (arg1, REAL_TYPE))
10116 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10117 &dconst0, false);
10118 break;
10120 CASE_FLT_FN (BUILT_IN_DREM):
10121 CASE_FLT_FN (BUILT_IN_REMAINDER):
10122 if (validate_arg (arg0, REAL_TYPE)
10123 && validate_arg (arg1, REAL_TYPE))
10124 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10125 break;
10127 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10128 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10129 if (validate_arg (arg0, REAL_TYPE)
10130 && validate_arg (arg1, POINTER_TYPE))
10131 return do_mpfr_lgamma_r (arg0, arg1, type);
10132 break;
10134 CASE_FLT_FN (BUILT_IN_ATAN2):
10135 if (validate_arg (arg0, REAL_TYPE)
10136 && validate_arg (arg1, REAL_TYPE))
10137 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10138 break;
10140 CASE_FLT_FN (BUILT_IN_FDIM):
10141 if (validate_arg (arg0, REAL_TYPE)
10142 && validate_arg (arg1, REAL_TYPE))
10143 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10144 break;
10146 CASE_FLT_FN (BUILT_IN_HYPOT):
10147 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10149 CASE_FLT_FN (BUILT_IN_CPOW):
10150 if (validate_arg (arg0, COMPLEX_TYPE)
10151 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10152 && validate_arg (arg1, COMPLEX_TYPE)
10153 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10154 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10155 break;
10157 CASE_FLT_FN (BUILT_IN_LDEXP):
10158 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10159 CASE_FLT_FN (BUILT_IN_SCALBN):
10160 CASE_FLT_FN (BUILT_IN_SCALBLN):
10161 return fold_builtin_load_exponent (loc, arg0, arg1,
10162 type, /*ldexp=*/false);
10164 CASE_FLT_FN (BUILT_IN_FREXP):
10165 return fold_builtin_frexp (loc, arg0, arg1, type);
10167 CASE_FLT_FN (BUILT_IN_MODF):
10168 return fold_builtin_modf (loc, arg0, arg1, type);
10170 case BUILT_IN_STRSTR:
10171 return fold_builtin_strstr (loc, arg0, arg1, type);
10173 case BUILT_IN_STRSPN:
10174 return fold_builtin_strspn (loc, arg0, arg1);
10176 case BUILT_IN_STRCSPN:
10177 return fold_builtin_strcspn (loc, arg0, arg1);
10179 case BUILT_IN_STRCHR:
10180 case BUILT_IN_INDEX:
10181 return fold_builtin_strchr (loc, arg0, arg1, type);
10183 case BUILT_IN_STRRCHR:
10184 case BUILT_IN_RINDEX:
10185 return fold_builtin_strrchr (loc, arg0, arg1, type);
10187 case BUILT_IN_STPCPY:
10188 if (ignore)
10190 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10191 if (!fn)
10192 break;
10194 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10196 else
10197 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10198 break;
10200 case BUILT_IN_STRCMP:
10201 return fold_builtin_strcmp (loc, arg0, arg1);
10203 case BUILT_IN_STRPBRK:
10204 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10206 case BUILT_IN_EXPECT:
10207 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10209 CASE_FLT_FN (BUILT_IN_POW):
10210 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10212 CASE_FLT_FN (BUILT_IN_POWI):
10213 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10215 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10216 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10218 CASE_FLT_FN (BUILT_IN_FMIN):
10219 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10221 CASE_FLT_FN (BUILT_IN_FMAX):
10222 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10224 case BUILT_IN_ISGREATER:
10225 return fold_builtin_unordered_cmp (loc, fndecl,
10226 arg0, arg1, UNLE_EXPR, LE_EXPR);
10227 case BUILT_IN_ISGREATEREQUAL:
10228 return fold_builtin_unordered_cmp (loc, fndecl,
10229 arg0, arg1, UNLT_EXPR, LT_EXPR);
10230 case BUILT_IN_ISLESS:
10231 return fold_builtin_unordered_cmp (loc, fndecl,
10232 arg0, arg1, UNGE_EXPR, GE_EXPR);
10233 case BUILT_IN_ISLESSEQUAL:
10234 return fold_builtin_unordered_cmp (loc, fndecl,
10235 arg0, arg1, UNGT_EXPR, GT_EXPR);
10236 case BUILT_IN_ISLESSGREATER:
10237 return fold_builtin_unordered_cmp (loc, fndecl,
10238 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10239 case BUILT_IN_ISUNORDERED:
10240 return fold_builtin_unordered_cmp (loc, fndecl,
10241 arg0, arg1, UNORDERED_EXPR,
10242 NOP_EXPR);
10244 /* We do the folding for va_start in the expander. */
10245 case BUILT_IN_VA_START:
10246 break;
10248 case BUILT_IN_OBJECT_SIZE:
10249 return fold_builtin_object_size (arg0, arg1);
10251 case BUILT_IN_PRINTF:
10252 case BUILT_IN_PRINTF_UNLOCKED:
10253 case BUILT_IN_VPRINTF:
10254 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10256 case BUILT_IN_PRINTF_CHK:
10257 case BUILT_IN_VPRINTF_CHK:
10258 if (!validate_arg (arg0, INTEGER_TYPE)
10259 || TREE_SIDE_EFFECTS (arg0))
10260 return NULL_TREE;
10261 else
10262 return fold_builtin_printf (loc, fndecl,
10263 arg1, NULL_TREE, ignore, fcode);
10264 break;
10266 case BUILT_IN_FPRINTF:
10267 case BUILT_IN_FPRINTF_UNLOCKED:
10268 case BUILT_IN_VFPRINTF:
10269 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10270 ignore, fcode);
10272 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10273 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10275 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10276 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10278 default:
10279 break;
10281 return NULL_TREE;
10284 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10285 and ARG2. IGNORE is true if the result of the function call is ignored.
10286 This function returns NULL_TREE if no simplification was possible. */
10288 static tree
10289 fold_builtin_3 (location_t loc, tree fndecl,
10290 tree arg0, tree arg1, tree arg2, bool ignore)
10292 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10293 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10294 switch (fcode)
10297 CASE_FLT_FN (BUILT_IN_SINCOS):
10298 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10300 CASE_FLT_FN (BUILT_IN_FMA):
10301 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10302 break;
10304 CASE_FLT_FN (BUILT_IN_REMQUO):
10305 if (validate_arg (arg0, REAL_TYPE)
10306 && validate_arg (arg1, REAL_TYPE)
10307 && validate_arg (arg2, POINTER_TYPE))
10308 return do_mpfr_remquo (arg0, arg1, arg2);
10309 break;
10311 case BUILT_IN_STRNCAT:
10312 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10314 case BUILT_IN_STRNCMP:
10315 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10317 case BUILT_IN_MEMCHR:
10318 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10320 case BUILT_IN_BCMP:
10321 case BUILT_IN_MEMCMP:
10322 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10324 case BUILT_IN_STRCAT_CHK:
10325 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10327 case BUILT_IN_PRINTF_CHK:
10328 case BUILT_IN_VPRINTF_CHK:
10329 if (!validate_arg (arg0, INTEGER_TYPE)
10330 || TREE_SIDE_EFFECTS (arg0))
10331 return NULL_TREE;
10332 else
10333 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10334 break;
10336 case BUILT_IN_FPRINTF:
10337 case BUILT_IN_FPRINTF_UNLOCKED:
10338 case BUILT_IN_VFPRINTF:
10339 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10340 ignore, fcode);
10342 case BUILT_IN_FPRINTF_CHK:
10343 case BUILT_IN_VFPRINTF_CHK:
10344 if (!validate_arg (arg1, INTEGER_TYPE)
10345 || TREE_SIDE_EFFECTS (arg1))
10346 return NULL_TREE;
10347 else
10348 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10349 ignore, fcode);
10351 case BUILT_IN_EXPECT:
10352 return fold_builtin_expect (loc, arg0, arg1, arg2);
10354 default:
10355 break;
10357 return NULL_TREE;
10360 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10361 ARG2, and ARG3. IGNORE is true if the result of the function call is
10362 ignored. This function returns NULL_TREE if no simplification was
10363 possible. */
10365 static tree
10366 fold_builtin_4 (location_t loc, tree fndecl,
10367 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10369 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10371 switch (fcode)
10373 case BUILT_IN_STRNCAT_CHK:
10374 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10376 case BUILT_IN_FPRINTF_CHK:
10377 case BUILT_IN_VFPRINTF_CHK:
10378 if (!validate_arg (arg1, INTEGER_TYPE)
10379 || TREE_SIDE_EFFECTS (arg1))
10380 return NULL_TREE;
10381 else
10382 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10383 ignore, fcode);
10384 break;
10386 default:
10387 break;
10389 return NULL_TREE;
10392 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10393 arguments, where NARGS <= 4. IGNORE is true if the result of the
10394 function call is ignored. This function returns NULL_TREE if no
10395 simplification was possible. Note that this only folds builtins with
10396 fixed argument patterns. Foldings that do varargs-to-varargs
10397 transformations, or that match calls with more than 4 arguments,
10398 need to be handled with fold_builtin_varargs instead. */
10400 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10402 static tree
10403 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10405 tree ret = NULL_TREE;
10407 switch (nargs)
10409 case 0:
10410 ret = fold_builtin_0 (loc, fndecl, ignore);
10411 break;
10412 case 1:
10413 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10414 break;
10415 case 2:
10416 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10417 break;
10418 case 3:
10419 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10420 break;
10421 case 4:
10422 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10423 ignore);
10424 break;
10425 default:
10426 break;
10428 if (ret)
10430 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10431 SET_EXPR_LOCATION (ret, loc);
10432 TREE_NO_WARNING (ret) = 1;
10433 return ret;
10435 return NULL_TREE;
10438 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10439 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10440 of arguments in ARGS to be omitted. OLDNARGS is the number of
10441 elements in ARGS. */
10443 static tree
10444 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10445 int skip, tree fndecl, int n, va_list newargs)
10447 int nargs = oldnargs - skip + n;
10448 tree *buffer;
10450 if (n > 0)
10452 int i, j;
10454 buffer = XALLOCAVEC (tree, nargs);
10455 for (i = 0; i < n; i++)
10456 buffer[i] = va_arg (newargs, tree);
10457 for (j = skip; j < oldnargs; j++, i++)
10458 buffer[i] = args[j];
10460 else
10461 buffer = args + skip;
10463 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10466 /* Return true if FNDECL shouldn't be folded right now.
10467 If a built-in function has an inline attribute always_inline
10468 wrapper, defer folding it until after always_inline functions have
10469 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10470 might not be performed. */
10472 bool
10473 avoid_folding_inline_builtin (tree fndecl)
10475 return (DECL_DECLARED_INLINE_P (fndecl)
10476 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10477 && cfun
10478 && !cfun->always_inline_functions_inlined
10479 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10482 /* A wrapper function for builtin folding that prevents warnings for
10483 "statement without effect" and the like, caused by removing the
10484 call node earlier than the warning is generated. */
10486 tree
10487 fold_call_expr (location_t loc, tree exp, bool ignore)
10489 tree ret = NULL_TREE;
10490 tree fndecl = get_callee_fndecl (exp);
10491 if (fndecl
10492 && TREE_CODE (fndecl) == FUNCTION_DECL
10493 && DECL_BUILT_IN (fndecl)
10494 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10495 yet. Defer folding until we see all the arguments
10496 (after inlining). */
10497 && !CALL_EXPR_VA_ARG_PACK (exp))
10499 int nargs = call_expr_nargs (exp);
10501 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10502 instead last argument is __builtin_va_arg_pack (). Defer folding
10503 even in that case, until arguments are finalized. */
10504 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10506 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10507 if (fndecl2
10508 && TREE_CODE (fndecl2) == FUNCTION_DECL
10509 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10510 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10511 return NULL_TREE;
10514 if (avoid_folding_inline_builtin (fndecl))
10515 return NULL_TREE;
10517 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10518 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10519 CALL_EXPR_ARGP (exp), ignore);
10520 else
10522 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10524 tree *args = CALL_EXPR_ARGP (exp);
10525 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10527 if (!ret)
10528 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10529 if (ret)
10530 return ret;
10533 return NULL_TREE;
10536 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10537 N arguments are passed in the array ARGARRAY. */
10539 tree
10540 fold_builtin_call_array (location_t loc, tree type,
10541 tree fn,
10542 int n,
10543 tree *argarray)
10545 tree ret = NULL_TREE;
10546 tree exp;
10548 if (TREE_CODE (fn) == ADDR_EXPR)
10550 tree fndecl = TREE_OPERAND (fn, 0);
10551 if (TREE_CODE (fndecl) == FUNCTION_DECL
10552 && DECL_BUILT_IN (fndecl))
10554 /* If last argument is __builtin_va_arg_pack (), arguments to this
10555 function are not finalized yet. Defer folding until they are. */
10556 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10558 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10559 if (fndecl2
10560 && TREE_CODE (fndecl2) == FUNCTION_DECL
10561 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10562 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10563 return build_call_array_loc (loc, type, fn, n, argarray);
10565 if (avoid_folding_inline_builtin (fndecl))
10566 return build_call_array_loc (loc, type, fn, n, argarray);
10567 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10569 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10570 if (ret)
10571 return ret;
10573 return build_call_array_loc (loc, type, fn, n, argarray);
10575 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10577 /* First try the transformations that don't require consing up
10578 an exp. */
10579 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10580 if (ret)
10581 return ret;
10584 /* If we got this far, we need to build an exp. */
10585 exp = build_call_array_loc (loc, type, fn, n, argarray);
10586 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10587 return ret ? ret : exp;
10591 return build_call_array_loc (loc, type, fn, n, argarray);
10594 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10595 along with N new arguments specified as the "..." parameters. SKIP
10596 is the number of arguments in EXP to be omitted. This function is used
10597 to do varargs-to-varargs transformations. */
10599 static tree
10600 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10602 va_list ap;
10603 tree t;
10605 va_start (ap, n);
10606 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10607 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10608 va_end (ap);
10610 return t;
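/* Editorial sketch (not part of the original source): a hypothetical
   use of rewrite_call_expr, modeled on the _chk folders elsewhere in
   this file -- skip all three arguments of a __strcpy_chk call and
   substitute a two-argument strcpy.  DEST and SRC are placeholders.  */
#if 0
  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
  if (fn)
    /* New call: strcpy (dest, src); the skipped bound is dropped.  */
    return rewrite_call_expr (loc, exp, 3, fn, 2, dest, src);
#endif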
10613 /* Validate a single argument ARG against a tree code CODE representing
10614 a type. */
10616 static bool
10617 validate_arg (const_tree arg, enum tree_code code)
10619 if (!arg)
10620 return false;
10621 else if (code == POINTER_TYPE)
10622 return POINTER_TYPE_P (TREE_TYPE (arg));
10623 else if (code == INTEGER_TYPE)
10624 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10625 return code == TREE_CODE (TREE_TYPE (arg));
10628 /* This function validates the types of a function call argument list
10629 against a specified list of tree_codes. If the last specifier is a 0,
10630 that represents an ellipsis, otherwise the last specifier must be a
10631 VOID_TYPE.
10633 This is the GIMPLE version of validate_arglist. Eventually we want to
10634 completely convert builtins.c to work from GIMPLEs and the tree based
10635 validate_arglist will then be removed. */
10637 bool
10638 validate_gimple_arglist (const_gimple call, ...)
10640 enum tree_code code;
10641 bool res = false;
10642 va_list ap;
10643 const_tree arg;
10644 size_t i;
10646 va_start (ap, call);
10647 i = 0;
10651 code = (enum tree_code) va_arg (ap, int);
10652 switch (code)
10654 case 0:
10655 /* This signifies an ellipsis; any further arguments are all ok. */
10656 res = true;
10657 goto end;
10658 case VOID_TYPE:
10659 /* This signifies an endlink, if no arguments remain, return
10660 true, otherwise return false. */
10661 res = (i == gimple_call_num_args (call));
10662 goto end;
10663 default:
10664 /* If no parameters remain or the parameter's code does not
10665 match the specified code, return false. Otherwise continue
10666 checking any remaining arguments. */
10667 arg = gimple_call_arg (call, i++);
10668 if (!validate_arg (arg, code))
10669 goto end;
10670 break;
10673 while (1);
10675 /* We need gotos here since we can only have one VA_CLOSE in a
10676 function. */
10677 end: ;
10678 va_end (ap);
10680 return res;
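/* Editorial sketch (not part of the original source): a hypothetical
   caller matching a memcpy-like signature.  VOID_TYPE terminates the
   list; a trailing 0 would instead permit further varargs.  */
#if 0
  if (!validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
                                INTEGER_TYPE, VOID_TYPE))
    return NULL_TREE;
#endif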
10683 /* Default target-specific builtin expander that does nothing. */
10685 rtx
10686 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10687 rtx target ATTRIBUTE_UNUSED,
10688 rtx subtarget ATTRIBUTE_UNUSED,
10689 enum machine_mode mode ATTRIBUTE_UNUSED,
10690 int ignore ATTRIBUTE_UNUSED)
10692 return NULL_RTX;
10695 /* Returns true if EXP represents data that would potentially reside
10696 in a readonly section. */
10698 bool
10699 readonly_data_expr (tree exp)
10701 STRIP_NOPS (exp);
10703 if (TREE_CODE (exp) != ADDR_EXPR)
10704 return false;
10706 exp = get_base_address (TREE_OPERAND (exp, 0));
10707 if (!exp)
10708 return false;
10710 /* Make sure we call decl_readonly_section only for trees it
10711 can handle (since it returns true for everything it doesn't
10712 understand). */
10713 if (TREE_CODE (exp) == STRING_CST
10714 || TREE_CODE (exp) == CONSTRUCTOR
10715 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10716 return decl_readonly_section (exp, 0);
10717 else
10718 return false;
10721 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10722 to the call, and TYPE is its return type.
10724 Return NULL_TREE if no simplification was possible, otherwise return the
10725 simplified form of the call as a tree.
10727 The simplified form may be a constant or other expression which
10728 computes the same value, but in a more efficient manner (including
10729 calls to other builtin functions).
10731 The call may contain arguments which need to be evaluated, but
10732 which are not useful to determine the result of the call. In
10733 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10734 COMPOUND_EXPR will be an argument which must be evaluated.
10735 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10736 COMPOUND_EXPR in the chain will contain the tree for the simplified
10737 form of the builtin function call. */
10739 static tree
10740 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10742 if (!validate_arg (s1, POINTER_TYPE)
10743 || !validate_arg (s2, POINTER_TYPE))
10744 return NULL_TREE;
10745 else
10747 tree fn;
10748 const char *p1, *p2;
10750 p2 = c_getstr (s2);
10751 if (p2 == NULL)
10752 return NULL_TREE;
10754 p1 = c_getstr (s1);
10755 if (p1 != NULL)
10757 const char *r = strstr (p1, p2);
10758 tree tem;
10760 if (r == NULL)
10761 return build_int_cst (TREE_TYPE (s1), 0);
10763 /* Return an offset into the constant string argument. */
10764 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10765 return fold_convert_loc (loc, type, tem);
10768 /* The argument is const char *, and the result is char *, so we need
10769 a type conversion here to avoid a warning. */
10770 if (p2[0] == '\0')
10771 return fold_convert_loc (loc, type, s1);
10773 if (p2[1] != '\0')
10774 return NULL_TREE;
10776 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10777 if (!fn)
10778 return NULL_TREE;
10780 /* New argument list transforming strstr(s1, s2) to
10781 strchr(s1, s2[0]). */
10782 return build_call_expr_loc (loc, fn, 2, s1,
10783 build_int_cst (integer_type_node, p2[0]));
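/* Editorial sketch (not part of the original source): the effect of
   the strstr fold on user code.  */
#if 0
#include <string.h>

char *
first_a (char *s)
{
  /* One-character needle: rewritten to strchr (s, 'a').  With a
     constant haystack the result folds to an offset or a null
     pointer instead.  */
  return strstr (s, "a");
}
#endif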
10787 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10788 the call, and TYPE is its return type.
10790 Return NULL_TREE if no simplification was possible, otherwise return the
10791 simplified form of the call as a tree.
10793 The simplified form may be a constant or other expression which
10794 computes the same value, but in a more efficient manner (including
10795 calls to other builtin functions).
10797 The call may contain arguments which need to be evaluated, but
10798 which are not useful to determine the result of the call. In
10799 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10800 COMPOUND_EXPR will be an argument which must be evaluated.
10801 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10802 COMPOUND_EXPR in the chain will contain the tree for the simplified
10803 form of the builtin function call. */
10805 static tree
10806 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10808 if (!validate_arg (s1, POINTER_TYPE)
10809 || !validate_arg (s2, INTEGER_TYPE))
10810 return NULL_TREE;
10811 else
10813 const char *p1;
10815 if (TREE_CODE (s2) != INTEGER_CST)
10816 return NULL_TREE;
10818 p1 = c_getstr (s1);
10819 if (p1 != NULL)
10821 char c;
10822 const char *r;
10823 tree tem;
10825 if (target_char_cast (s2, &c))
10826 return NULL_TREE;
10828 r = strchr (p1, c);
10830 if (r == NULL)
10831 return build_int_cst (TREE_TYPE (s1), 0);
10833 /* Return an offset into the constant string argument. */
10834 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10835 return fold_convert_loc (loc, type, tem);
10837 return NULL_TREE;
10841 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10842 the call, and TYPE is its return type.
10844 Return NULL_TREE if no simplification was possible, otherwise return the
10845 simplified form of the call as a tree.
10847 The simplified form may be a constant or other expression which
10848 computes the same value, but in a more efficient manner (including
10849 calls to other builtin functions).
10851 The call may contain arguments which need to be evaluated, but
10852 which are not useful to determine the result of the call. In
10853 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10854 COMPOUND_EXPR will be an argument which must be evaluated.
10855 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10856 COMPOUND_EXPR in the chain will contain the tree for the simplified
10857 form of the builtin function call. */
10859 static tree
10860 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10862 if (!validate_arg (s1, POINTER_TYPE)
10863 || !validate_arg (s2, INTEGER_TYPE))
10864 return NULL_TREE;
10865 else
10867 tree fn;
10868 const char *p1;
10870 if (TREE_CODE (s2) != INTEGER_CST)
10871 return NULL_TREE;
10873 p1 = c_getstr (s1);
10874 if (p1 != NULL)
10876 char c;
10877 const char *r;
10878 tree tem;
10880 if (target_char_cast (s2, &c))
10881 return NULL_TREE;
10883 r = strrchr (p1, c);
10885 if (r == NULL)
10886 return build_int_cst (TREE_TYPE (s1), 0);
10888 /* Return an offset into the constant string argument. */
10889 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10890 return fold_convert_loc (loc, type, tem);
10893 if (! integer_zerop (s2))
10894 return NULL_TREE;
10896 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10897 if (!fn)
10898 return NULL_TREE;
10900 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10901 return build_call_expr_loc (loc, fn, 2, s1, s2);
10905 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10906 to the call, and TYPE is its return type.
10908 Return NULL_TREE if no simplification was possible, otherwise return the
10909 simplified form of the call as a tree.
10911 The simplified form may be a constant or other expression which
10912 computes the same value, but in a more efficient manner (including
10913 calls to other builtin functions).
10915 The call may contain arguments which need to be evaluated, but
10916 which are not useful to determine the result of the call. In
10917 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10918 COMPOUND_EXPR will be an argument which must be evaluated.
10919 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10920 COMPOUND_EXPR in the chain will contain the tree for the simplified
10921 form of the builtin function call. */
10923 static tree
10924 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10926 if (!validate_arg (s1, POINTER_TYPE)
10927 || !validate_arg (s2, POINTER_TYPE))
10928 return NULL_TREE;
10929 else
10931 tree fn;
10932 const char *p1, *p2;
10934 p2 = c_getstr (s2);
10935 if (p2 == NULL)
10936 return NULL_TREE;
10938 p1 = c_getstr (s1);
10939 if (p1 != NULL)
10941 const char *r = strpbrk (p1, p2);
10942 tree tem;
10944 if (r == NULL)
10945 return build_int_cst (TREE_TYPE (s1), 0);
10947 /* Return an offset into the constant string argument. */
10948 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10949 return fold_convert_loc (loc, type, tem);
10952 if (p2[0] == '\0')
10953 /* strpbrk(x, "") == NULL.
10954 Evaluate and ignore s1 in case it had side-effects. */
10955 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10957 if (p2[1] != '\0')
10958 return NULL_TREE; /* Really call strpbrk. */
10960 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10961 if (!fn)
10962 return NULL_TREE;
10964 /* New argument list transforming strpbrk(s1, s2) to
10965 strchr(s1, s2[0]). */
10966 return build_call_expr_loc (loc, fn, 2, s1,
10967 build_int_cst (integer_type_node, p2[0]));
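/* For illustration: strpbrk ("hello", "lo") folds to &"hello"[2];
   strpbrk (s1, "") folds to a null pointer while still evaluating S1
   for side effects; and strpbrk (s1, "x") becomes the cheaper call
   strchr (s1, 'x').  */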
10971 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10972 arguments to the call.
10974 Return NULL_TREE if no simplification was possible, otherwise return the
10975 simplified form of the call as a tree.
10977 The simplified form may be a constant or other expression which
10978 computes the same value, but in a more efficient manner (including
10979 calls to other builtin functions).
10981 The call may contain arguments which need to be evaluated, but
10982 which are not useful to determine the result of the call. In
10983 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10984 COMPOUND_EXPR will be an argument which must be evaluated.
10985 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10986 COMPOUND_EXPR in the chain will contain the tree for the simplified
10987 form of the builtin function call. */
10989 static tree
10990 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
10992 if (!validate_arg (dst, POINTER_TYPE)
10993 || !validate_arg (src, POINTER_TYPE)
10994 || !validate_arg (len, INTEGER_TYPE))
10995 return NULL_TREE;
10996 else
10998 const char *p = c_getstr (src);
11000 /* If the requested length is zero, or the src parameter string
11001 length is zero, return the dst parameter. */
11002 if (integer_zerop (len) || (p && *p == '\0'))
11003 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11005 /* If the requested len is greater than or equal to the string
11006 length, call strcat. */
11007 if (TREE_CODE (len) == INTEGER_CST && p
11008 && compare_tree_int (len, strlen (p)) >= 0)
11010 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11012 /* If the replacement _DECL isn't initialized, don't do the
11013 transformation. */
11014 if (!fn)
11015 return NULL_TREE;
11017 return build_call_expr_loc (loc, fn, 2, dst, src);
11019 return NULL_TREE;
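/* For illustration: strncat (dst, src, 0) folds to DST (SRC and LEN
   are still evaluated for side effects), and strncat (dst, "ab", 5)
   becomes strcat (dst, "ab"), since a bound of 5 >= strlen ("ab")
   never takes effect.  */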
11023 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11024 to the call.
11026 Return NULL_TREE if no simplification was possible, otherwise return the
11027 simplified form of the call as a tree.
11029 The simplified form may be a constant or other expression which
11030 computes the same value, but in a more efficient manner (including
11031 calls to other builtin functions).
11033 The call may contain arguments which need to be evaluated, but
11034 which are not useful to determine the result of the call. In
11035 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11036 COMPOUND_EXPR will be an argument which must be evaluated.
11037 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11038 COMPOUND_EXPR in the chain will contain the tree for the simplified
11039 form of the builtin function call. */
11041 static tree
11042 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11044 if (!validate_arg (s1, POINTER_TYPE)
11045 || !validate_arg (s2, POINTER_TYPE))
11046 return NULL_TREE;
11047 else
11049 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11051 /* If both arguments are constants, evaluate at compile-time. */
11052 if (p1 && p2)
11054 const size_t r = strspn (p1, p2);
11055 return build_int_cst (size_type_node, r);
11058 /* If either argument is "", the result is zero. */
11059 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11060 /* Evaluate and ignore both arguments in case either one has
11061 side-effects. */
11062 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11063 s1, s2);
11064 return NULL_TREE;
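/* For illustration: strspn ("aab", "ab") folds to the constant 3,
   while strspn (s1, "") and strspn ("", s2) fold to 0 with both
   arguments still evaluated for side effects.  */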
11068 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11069 to the call.
11071 Return NULL_TREE if no simplification was possible, otherwise return the
11072 simplified form of the call as a tree.
11074 The simplified form may be a constant or other expression which
11075 computes the same value, but in a more efficient manner (including
11076 calls to other builtin functions).
11078 The call may contain arguments which need to be evaluated, but
11079 which are not useful to determine the result of the call. In
11080 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11081 COMPOUND_EXPR will be an argument which must be evaluated.
11082 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11083 COMPOUND_EXPR in the chain will contain the tree for the simplified
11084 form of the builtin function call. */
11086 static tree
11087 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11089 if (!validate_arg (s1, POINTER_TYPE)
11090 || !validate_arg (s2, POINTER_TYPE))
11091 return NULL_TREE;
11092 else
11094 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11096 /* If both arguments are constants, evaluate at compile-time. */
11097 if (p1 && p2)
11099 const size_t r = strcspn (p1, p2);
11100 return build_int_cst (size_type_node, r);
11103 /* If the first argument is "", the result is zero. */
11104 if (p1 && *p1 == '\0')
11106 /* Evaluate and ignore argument s2 in case it has
11107 side-effects. */
11108 return omit_one_operand_loc (loc, size_type_node,
11109 size_zero_node, s2);
11112 /* If the second argument is "", return __builtin_strlen(s1). */
11113 if (p2 && *p2 == '\0')
11115 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11117 /* If the replacement _DECL isn't initialized, don't do the
11118 transformation. */
11119 if (!fn)
11120 return NULL_TREE;
11122 return build_call_expr_loc (loc, fn, 1, s1);
11124 return NULL_TREE;
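/* For illustration: strcspn ("abc", "c") folds to the constant 2,
   strcspn ("", s2) folds to 0, and strcspn (s1, "") becomes
   strlen (s1), since an empty reject set never stops the scan.  */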
11128 /* Fold the next_arg or va_start call EXP. Return true if an error was
11129 produced, false otherwise. This is done so that we don't output the
11130 error or warning more than once. */
11132 bool
11133 fold_builtin_next_arg (tree exp, bool va_start_p)
11135 tree fntype = TREE_TYPE (current_function_decl);
11136 int nargs = call_expr_nargs (exp);
11137 tree arg;
11138 /* There is a good chance the current input_location points inside the
11139 definition of the va_start macro (perhaps on the token for the
11140 builtin) in a system header, so warnings will not be emitted.
11141 Use the location in real source code. */
11142 source_location current_location =
11143 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11144 NULL);
11146 if (!stdarg_p (fntype))
11148 error ("%<va_start%> used in function with fixed args");
11149 return true;
11152 if (va_start_p)
11154 if (va_start_p && (nargs != 2))
11156 error ("wrong number of arguments to function %<va_start%>");
11157 return true;
11159 arg = CALL_EXPR_ARG (exp, 1);
11161 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11162 once we have checked the arguments and, if needed, issued a warning. */
11163 else
11165 if (nargs == 0)
11167 /* Evidently an out of date version of <stdarg.h>; can't validate
11168 va_start's second argument, but can still work as intended. */
11169 warning_at (current_location,
11170 OPT_Wvarargs,
11171 "%<__builtin_next_arg%> called without an argument");
11172 return true;
11174 else if (nargs > 1)
11176 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11177 return true;
11179 arg = CALL_EXPR_ARG (exp, 0);
11182 if (TREE_CODE (arg) == SSA_NAME)
11183 arg = SSA_NAME_VAR (arg);
11185 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11186 or __builtin_next_arg (0) the first time we see it, after checking
11187 the arguments and if needed issuing a warning. */
11188 if (!integer_zerop (arg))
11190 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11192 /* Strip off all nops for the sake of the comparison. This
11193 is not quite the same as STRIP_NOPS. It does more.
11194 We must also strip off INDIRECT_EXPR for C++ reference
11195 parameters. */
11196 while (CONVERT_EXPR_P (arg)
11197 || TREE_CODE (arg) == INDIRECT_REF)
11198 arg = TREE_OPERAND (arg, 0);
11199 if (arg != last_parm)
11201 /* FIXME: Sometimes the tree optimizers hand us something other
11202 than the last argument even though the user did use the last
11203 argument. We just warn here; the call is still rewritten
11204 below, so we may generate wrong code because of
11205 it. */
11206 warning_at (current_location,
11207 OPT_Wvarargs,
11208 "second parameter of %<va_start%> not last named argument");
11211 /* Undefined by C99 7.15.1.4p4 (va_start):
11212 "If the parameter parmN is declared with the register storage
11213 class, with a function or array type, or with a type that is
11214 not compatible with the type that results after application of
11215 the default argument promotions, the behavior is undefined." */
11217 else if (DECL_REGISTER (arg))
11219 warning_at (current_location,
11220 OPT_Wvarargs,
11221 "undefined behaviour when second parameter of "
11222 "%<va_start%> is declared with %<register%> storage");
11225 /* We want to verify the second parameter just once before the tree
11226 optimizers are run and then avoid keeping it in the tree,
11227 as otherwise we could warn even for correct code like:
11228 void foo (int i, ...)
11229 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11230 if (va_start_p)
11231 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11232 else
11233 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11235 return false;
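/* For illustration, given

     void foo (int i, ...)
     { va_list ap; va_start (ap, i); va_end (ap); }

   the checks above run once, then the second argument of the
   underlying __builtin_va_start is replaced by the constant 0 so
   later passes neither re-check it nor warn about it again.  */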
11239 /* Expand a call EXP to __builtin_object_size. */
11241 static rtx
11242 expand_builtin_object_size (tree exp)
11244 tree ost;
11245 int object_size_type;
11246 tree fndecl = get_callee_fndecl (exp);
11248 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11250 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11251 exp, fndecl);
11252 expand_builtin_trap ();
11253 return const0_rtx;
11256 ost = CALL_EXPR_ARG (exp, 1);
11257 STRIP_NOPS (ost);
11259 if (TREE_CODE (ost) != INTEGER_CST
11260 || tree_int_cst_sgn (ost) < 0
11261 || compare_tree_int (ost, 3) > 0)
11263 error ("%Klast argument of %D is not integer constant between 0 and 3",
11264 exp, fndecl);
11265 expand_builtin_trap ();
11266 return const0_rtx;
11269 object_size_type = tree_to_shwi (ost);
11271 return object_size_type < 2 ? constm1_rtx : const0_rtx;
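/* For illustration: when earlier folding could not determine a size,
   __builtin_object_size (p, 0) expands to (size_t) -1 ("unknown
   maximum") and __builtin_object_size (p, 2) expands to 0 ("unknown
   minimum"), matching the defaults computed above.  */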
11274 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11275 FCODE is the BUILT_IN_* to use.
11276 Return NULL_RTX if we failed; the caller should emit a normal call,
11277 otherwise try to get the result in TARGET, if convenient (and in
11278 mode MODE if that's convenient). */
11280 static rtx
11281 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11282 enum built_in_function fcode)
11284 tree dest, src, len, size;
11286 if (!validate_arglist (exp,
11287 POINTER_TYPE,
11288 fcode == BUILT_IN_MEMSET_CHK
11289 ? INTEGER_TYPE : POINTER_TYPE,
11290 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11291 return NULL_RTX;
11293 dest = CALL_EXPR_ARG (exp, 0);
11294 src = CALL_EXPR_ARG (exp, 1);
11295 len = CALL_EXPR_ARG (exp, 2);
11296 size = CALL_EXPR_ARG (exp, 3);
11298 if (! tree_fits_uhwi_p (size))
11299 return NULL_RTX;
11301 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11303 tree fn;
11305 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11307 warning_at (tree_nonartificial_location (exp),
11308 0, "%Kcall to %D will always overflow destination buffer",
11309 exp, get_callee_fndecl (exp));
11310 return NULL_RTX;
11313 fn = NULL_TREE;
11314 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11315 mem{cpy,pcpy,move,set} is available. */
11316 switch (fcode)
11318 case BUILT_IN_MEMCPY_CHK:
11319 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11320 break;
11321 case BUILT_IN_MEMPCPY_CHK:
11322 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11323 break;
11324 case BUILT_IN_MEMMOVE_CHK:
11325 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11326 break;
11327 case BUILT_IN_MEMSET_CHK:
11328 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11329 break;
11330 default:
11331 break;
11334 if (! fn)
11335 return NULL_RTX;
11337 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11338 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11339 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11340 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11342 else if (fcode == BUILT_IN_MEMSET_CHK)
11343 return NULL_RTX;
11344 else
11346 unsigned int dest_align = get_pointer_alignment (dest);
11348 /* If DEST is not a pointer type, call the normal function. */
11349 if (dest_align == 0)
11350 return NULL_RTX;
11352 /* If SRC and DEST are the same (and not volatile), do nothing. */
11353 if (operand_equal_p (src, dest, 0))
11355 tree expr;
11357 if (fcode != BUILT_IN_MEMPCPY_CHK)
11359 /* Evaluate and ignore LEN in case it has side-effects. */
11360 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11361 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11364 expr = fold_build_pointer_plus (dest, len);
11365 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11368 /* __memmove_chk special case. */
11369 if (fcode == BUILT_IN_MEMMOVE_CHK)
11371 unsigned int src_align = get_pointer_alignment (src);
11373 if (src_align == 0)
11374 return NULL_RTX;
11376 /* If src is categorized for a readonly section we can use
11377 normal __memcpy_chk. */
11378 if (readonly_data_expr (src))
11380 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11381 if (!fn)
11382 return NULL_RTX;
11383 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11384 dest, src, len, size);
11385 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11386 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11387 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11390 return NULL_RTX;
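/* For illustration: __builtin___memcpy_chk (d, s, n, (size_t) -1),
   where the object size is unknown, is lowered to plain
   memcpy (d, s, n); with constant SIZE and LEN such that SIZE < LEN,
   the call is left for the library routine and the "will always
   overflow" warning above is emitted instead.  */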
11394 /* Emit warning if a buffer overflow is detected at compile time. */
11396 static void
11397 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11399 int is_strlen = 0;
11400 tree len, size;
11401 location_t loc = tree_nonartificial_location (exp);
11403 switch (fcode)
11405 case BUILT_IN_STRCPY_CHK:
11406 case BUILT_IN_STPCPY_CHK:
11407 /* For __strcat_chk the warning will be emitted only if overflowing
11408 by at least strlen (dest) + 1 bytes. */
11409 case BUILT_IN_STRCAT_CHK:
11410 len = CALL_EXPR_ARG (exp, 1);
11411 size = CALL_EXPR_ARG (exp, 2);
11412 is_strlen = 1;
11413 break;
11414 case BUILT_IN_STRNCAT_CHK:
11415 case BUILT_IN_STRNCPY_CHK:
11416 case BUILT_IN_STPNCPY_CHK:
11417 len = CALL_EXPR_ARG (exp, 2);
11418 size = CALL_EXPR_ARG (exp, 3);
11419 break;
11420 case BUILT_IN_SNPRINTF_CHK:
11421 case BUILT_IN_VSNPRINTF_CHK:
11422 len = CALL_EXPR_ARG (exp, 1);
11423 size = CALL_EXPR_ARG (exp, 3);
11424 break;
11425 default:
11426 gcc_unreachable ();
11429 if (!len || !size)
11430 return;
11432 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11433 return;
11435 if (is_strlen)
11437 len = c_strlen (len, 1);
11438 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11439 return;
11441 else if (fcode == BUILT_IN_STRNCAT_CHK)
11443 tree src = CALL_EXPR_ARG (exp, 1);
11444 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11445 return;
11446 src = c_strlen (src, 1);
11447 if (! src || ! tree_fits_uhwi_p (src))
11449 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11450 exp, get_callee_fndecl (exp));
11451 return;
11453 else if (tree_int_cst_lt (src, size))
11454 return;
11456 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11457 return;
11459 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11460 exp, get_callee_fndecl (exp));
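/* For illustration, a call such as

     char buf[4];
     __builtin___strcpy_chk (buf, "too long", 4);

   has a source length of 8, which is not smaller than the object
   size of 4, so the "will always overflow" warning above fires.  */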
11463 /* Emit warning if a buffer overflow is detected at compile time
11464 in __sprintf_chk/__vsprintf_chk calls. */
11466 static void
11467 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11469 tree size, len, fmt;
11470 const char *fmt_str;
11471 int nargs = call_expr_nargs (exp);
11473 /* Verify the required arguments in the original call. */
11475 if (nargs < 4)
11476 return;
11477 size = CALL_EXPR_ARG (exp, 2);
11478 fmt = CALL_EXPR_ARG (exp, 3);
11480 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11481 return;
11483 /* Check whether the format is a literal string constant. */
11484 fmt_str = c_getstr (fmt);
11485 if (fmt_str == NULL)
11486 return;
11488 if (!init_target_chars ())
11489 return;
11491 /* If the format doesn't contain % args or %%, we know its size. */
11492 if (strchr (fmt_str, target_percent) == 0)
11493 len = build_int_cstu (size_type_node, strlen (fmt_str));
11494 /* If the format is "%s" and the first variadic argument is a string
11495 literal, we know the length too. */
11496 else if (fcode == BUILT_IN_SPRINTF_CHK
11497 && strcmp (fmt_str, target_percent_s) == 0)
11499 tree arg;
11501 if (nargs < 5)
11502 return;
11503 arg = CALL_EXPR_ARG (exp, 4);
11504 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11505 return;
11507 len = c_strlen (arg, 1);
11508 if (!len || ! tree_fits_uhwi_p (len))
11509 return;
11511 else
11512 return;
11514 if (! tree_int_cst_lt (len, size))
11515 warning_at (tree_nonartificial_location (exp),
11516 0, "%Kcall to %D will always overflow destination buffer",
11517 exp, get_callee_fndecl (exp));
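/* For illustration: __builtin___sprintf_chk (buf, 0, 4, "abcdef")
   writes a known 6 bytes plus the terminating NUL into an object of
   size 4, so it is diagnosed; a "%s" format is handled the same way
   when the string argument is a literal.  */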
11520 /* Emit warning if a free is called with address of a variable. */
11522 static void
11523 maybe_emit_free_warning (tree exp)
11525 tree arg = CALL_EXPR_ARG (exp, 0);
11527 STRIP_NOPS (arg);
11528 if (TREE_CODE (arg) != ADDR_EXPR)
11529 return;
11531 arg = get_base_address (TREE_OPERAND (arg, 0));
11532 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11533 return;
11535 if (SSA_VAR_P (arg))
11536 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11537 "%Kattempt to free a non-heap object %qD", exp, arg);
11538 else
11539 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11540 "%Kattempt to free a non-heap object", exp);
11543 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11544 if possible. */
11546 static tree
11547 fold_builtin_object_size (tree ptr, tree ost)
11549 unsigned HOST_WIDE_INT bytes;
11550 int object_size_type;
11552 if (!validate_arg (ptr, POINTER_TYPE)
11553 || !validate_arg (ost, INTEGER_TYPE))
11554 return NULL_TREE;
11556 STRIP_NOPS (ost);
11558 if (TREE_CODE (ost) != INTEGER_CST
11559 || tree_int_cst_sgn (ost) < 0
11560 || compare_tree_int (ost, 3) > 0)
11561 return NULL_TREE;
11563 object_size_type = tree_to_shwi (ost);
11565 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11566 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11567 and (size_t) 0 for types 2 and 3. */
11568 if (TREE_SIDE_EFFECTS (ptr))
11569 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11571 if (TREE_CODE (ptr) == ADDR_EXPR)
11573 bytes = compute_builtin_object_size (ptr, object_size_type);
11574 if (wi::fits_to_tree_p (bytes, size_type_node))
11575 return build_int_cstu (size_type_node, bytes);
11577 else if (TREE_CODE (ptr) == SSA_NAME)
11579 /* If the object size is not known yet, delay folding until
11580 later. Maybe subsequent passes will help determine
11581 it. */
11582 bytes = compute_builtin_object_size (ptr, object_size_type);
11583 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11584 && wi::fits_to_tree_p (bytes, size_type_node))
11585 return build_int_cstu (size_type_node, bytes);
11588 return NULL_TREE;
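/* For illustration: given "char a[10];", __builtin_object_size
   (&a[3], 0) folds to 7, the bytes remaining up to the end of the
   object; an SSA_NAME pointer whose target is still unknown is left
   unfolded so a later pass may retry.  */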
11591 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11592 are the arguments to the call. */
11594 static tree
11595 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
11596 tree src, tree size)
11598 tree fn;
11599 const char *p;
11601 if (!validate_arg (dest, POINTER_TYPE)
11602 || !validate_arg (src, POINTER_TYPE)
11603 || !validate_arg (size, INTEGER_TYPE))
11604 return NULL_TREE;
11606 p = c_getstr (src);
11607 /* If the SRC parameter is "", return DEST. */
11608 if (p && *p == '\0')
11609 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11611 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
11612 return NULL_TREE;
11614 /* If __builtin_strcat_chk is used, assume strcat is available. */
11615 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
11616 if (!fn)
11617 return NULL_TREE;
11619 return build_call_expr_loc (loc, fn, 2, dest, src);
11622 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11623 LEN, and SIZE. */
11625 static tree
11626 fold_builtin_strncat_chk (location_t loc, tree fndecl,
11627 tree dest, tree src, tree len, tree size)
11629 tree fn;
11630 const char *p;
11632 if (!validate_arg (dest, POINTER_TYPE)
11633 || !validate_arg (src, POINTER_TYPE)
11634 || !validate_arg (len, INTEGER_TYPE)
11635 || !validate_arg (size, INTEGER_TYPE))
11636 return NULL_TREE;
11638 p = c_getstr (src);
11639 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
11640 if (p && *p == '\0')
11641 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11642 else if (integer_zerop (len))
11643 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11645 if (! tree_fits_uhwi_p (size))
11646 return NULL_TREE;
11648 if (! integer_all_onesp (size))
11650 tree src_len = c_strlen (src, 1);
11651 if (src_len
11652 && tree_fits_uhwi_p (src_len)
11653 && tree_fits_uhwi_p (len)
11654 && ! tree_int_cst_lt (len, src_len))
11656 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
11657 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
11658 if (!fn)
11659 return NULL_TREE;
11661 return build_call_expr_loc (loc, fn, 3, dest, src, size);
11663 return NULL_TREE;
11666 /* If __builtin_strncat_chk is used, assume strncat is available. */
11667 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
11668 if (!fn)
11669 return NULL_TREE;
11671 return build_call_expr_loc (loc, fn, 3, dest, src, len);
11674 /* Builtins with folding operations that operate on "..." arguments
11675 need special handling; we need to store the arguments in a convenient
11676 data structure before attempting any folding. Fortunately there are
11677 only a few builtins that fall into this category. FNDECL is the
11678 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11679 result of the function call is ignored. */
11681 static tree
11682 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11683 bool ignore ATTRIBUTE_UNUSED)
11685 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11686 tree ret = NULL_TREE;
11688 switch (fcode)
11690 case BUILT_IN_FPCLASSIFY:
11691 ret = fold_builtin_fpclassify (loc, exp);
11692 break;
11694 default:
11695 break;
11697 if (ret)
11699 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11700 SET_EXPR_LOCATION (ret, loc);
11701 TREE_NO_WARNING (ret) = 1;
11702 return ret;
11704 return NULL_TREE;
11707 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
11708 FMT and ARG are the arguments to the call; we don't fold cases with
11709 more than 2 arguments, and ARG may be null if this is a 1-argument case.
11711 Return NULL_TREE if no simplification was possible, otherwise return the
11712 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11713 code of the function to be simplified. */
11715 static tree
11716 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
11717 tree arg, bool ignore,
11718 enum built_in_function fcode)
11720 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
11721 const char *fmt_str = NULL;
11723 /* If the return value is used, don't do the transformation. */
11724 if (! ignore)
11725 return NULL_TREE;
11727 /* Verify the required arguments in the original call. */
11728 if (!validate_arg (fmt, POINTER_TYPE))
11729 return NULL_TREE;
11731 /* Check whether the format is a literal string constant. */
11732 fmt_str = c_getstr (fmt);
11733 if (fmt_str == NULL)
11734 return NULL_TREE;
11736 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
11738 /* If we're using an unlocked function, assume the other
11739 unlocked functions exist explicitly. */
11740 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
11741 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
11743 else
11745 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
11746 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
11749 if (!init_target_chars ())
11750 return NULL_TREE;
11752 if (strcmp (fmt_str, target_percent_s) == 0
11753 || strchr (fmt_str, target_percent) == NULL)
11755 const char *str;
11757 if (strcmp (fmt_str, target_percent_s) == 0)
11759 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11760 return NULL_TREE;
11762 if (!arg || !validate_arg (arg, POINTER_TYPE))
11763 return NULL_TREE;
11765 str = c_getstr (arg);
11766 if (str == NULL)
11767 return NULL_TREE;
11769 else
11771 /* The format specifier doesn't contain any '%' characters. */
11772 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
11773 && arg)
11774 return NULL_TREE;
11775 str = fmt_str;
11778 /* If the string was "", printf does nothing. */
11779 if (str[0] == '\0')
11780 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11782 /* If the string has length 1, call putchar. */
11783 if (str[1] == '\0')
11785 /* Given printf("c") (where c is any one character),
11786 convert "c"[0] to an int and pass that to the replacement
11787 function. */
11788 newarg = build_int_cst (integer_type_node, str[0]);
11789 if (fn_putchar)
11790 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
11792 else
11794 /* If the string was "string\n", call puts("string"). */
11795 size_t len = strlen (str);
11796 if ((unsigned char)str[len - 1] == target_newline
11797 && (size_t) (int) len == len
11798 && (int) len > 0)
11800 char *newstr;
11801 tree offset_node, string_cst;
11803 /* Create a NUL-terminated string that's one char shorter
11804 than the original, stripping off the trailing '\n'. */
11805 newarg = build_string_literal (len, str);
11806 string_cst = string_constant (newarg, &offset_node);
11807 gcc_checking_assert (string_cst
11808 && (TREE_STRING_LENGTH (string_cst)
11809 == (int) len)
11810 && integer_zerop (offset_node)
11811 && (unsigned char)
11812 TREE_STRING_POINTER (string_cst)[len - 1]
11813 == target_newline);
11814 /* build_string_literal creates a new STRING_CST,
11815 modify it in place to avoid double copying. */
11816 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
11817 newstr[len - 1] = '\0';
11818 if (fn_puts)
11819 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
11821 else
11822 /* We'd like to arrange to call fputs(string,stdout) here,
11823 but we need stdout and don't have a way to get it yet. */
11824 return NULL_TREE;
11828 /* The other optimizations can be done only on the non-va_list variants. */
11829 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11830 return NULL_TREE;
11832 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
11833 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
11835 if (!arg || !validate_arg (arg, POINTER_TYPE))
11836 return NULL_TREE;
11837 if (fn_puts)
11838 call = build_call_expr_loc (loc, fn_puts, 1, arg);
11841 /* If the format specifier was "%c", call __builtin_putchar(arg). */
11842 else if (strcmp (fmt_str, target_percent_c) == 0)
11844 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11845 return NULL_TREE;
11846 if (fn_putchar)
11847 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
11850 if (!call)
11851 return NULL_TREE;
11853 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
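/* For illustration, the printf rewrites above include:
     printf ("x")        -> putchar ('x')
     printf ("hello\n")  -> puts ("hello")
     printf ("%s\n", s)  -> puts (s)
     printf ("%c", c)    -> putchar (c)
   all of them valid only because the return value is ignored.  */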
11856 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
11857 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
11858 more than 3 arguments, and ARG may be null in the 2-argument case.
11860 Return NULL_TREE if no simplification was possible, otherwise return the
11861 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11862 code of the function to be simplified. */
11864 static tree
11865 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
11866 tree fmt, tree arg, bool ignore,
11867 enum built_in_function fcode)
11869 tree fn_fputc, fn_fputs, call = NULL_TREE;
11870 const char *fmt_str = NULL;
11872 /* If the return value is used, don't do the transformation. */
11873 if (! ignore)
11874 return NULL_TREE;
11876 /* Verify the required arguments in the original call. */
11877 if (!validate_arg (fp, POINTER_TYPE))
11878 return NULL_TREE;
11879 if (!validate_arg (fmt, POINTER_TYPE))
11880 return NULL_TREE;
11882 /* Check whether the format is a literal string constant. */
11883 fmt_str = c_getstr (fmt);
11884 if (fmt_str == NULL)
11885 return NULL_TREE;
11887 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
11889 /* If we're using an unlocked function, assume the other
11890 unlocked functions exist explicitly. */
11891 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
11892 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
11894 else
11896 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
11897 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
11900 if (!init_target_chars ())
11901 return NULL_TREE;
11903 /* If the format doesn't contain % args or %%, use strcpy. */
11904 if (strchr (fmt_str, target_percent) == NULL)
11906 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
11907 && arg)
11908 return NULL_TREE;
11910 /* If the format specifier was "", fprintf does nothing. */
11911 if (fmt_str[0] == '\0')
11913 /* If FP has side-effects, just wait until gimplification is
11914 done. */
11915 if (TREE_SIDE_EFFECTS (fp))
11916 return NULL_TREE;
11918 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11921 /* When "string" doesn't contain %, replace all cases of
11922 fprintf (fp, string) with fputs (string, fp). The fputs
11923 builtin will take care of special cases like length == 1. */
11924 if (fn_fputs)
11925 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
11928 /* The other optimizations can be done only on the non-va_list variants. */
11929 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
11930 return NULL_TREE;
11932 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
11933 else if (strcmp (fmt_str, target_percent_s) == 0)
11935 if (!arg || !validate_arg (arg, POINTER_TYPE))
11936 return NULL_TREE;
11937 if (fn_fputs)
11938 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
11941 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
11942 else if (strcmp (fmt_str, target_percent_c) == 0)
11944 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11945 return NULL_TREE;
11946 if (fn_fputc)
11947 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
11950 if (!call)
11951 return NULL_TREE;
11952 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
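/* For illustration, the fprintf rewrites above include:
     fprintf (fp, "hello") -> fputs ("hello", fp)
     fprintf (fp, "%s", s) -> fputs (s, fp)
     fprintf (fp, "%c", c) -> fputc (c, fp)
   again only when the return value is ignored.  */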
11955 /* Initialize format string characters in the target charset. */
11957 bool
11958 init_target_chars (void)
11960 static bool init;
11961 if (!init)
11963 target_newline = lang_hooks.to_target_charset ('\n');
11964 target_percent = lang_hooks.to_target_charset ('%');
11965 target_c = lang_hooks.to_target_charset ('c');
11966 target_s = lang_hooks.to_target_charset ('s');
11967 if (target_newline == 0 || target_percent == 0 || target_c == 0
11968 || target_s == 0)
11969 return false;
11971 target_percent_c[0] = target_percent;
11972 target_percent_c[1] = target_c;
11973 target_percent_c[2] = '\0';
11975 target_percent_s[0] = target_percent;
11976 target_percent_s[1] = target_s;
11977 target_percent_s[2] = '\0';
11979 target_percent_s_newline[0] = target_percent;
11980 target_percent_s_newline[1] = target_s;
11981 target_percent_s_newline[2] = target_newline;
11982 target_percent_s_newline[3] = '\0';
11984 init = true;
11986 return true;
11989 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11990 and no overflow/underflow occurred. INEXACT is true if M was not
11991 exactly calculated. TYPE is the tree type for the result. This
11992 function assumes that you cleared the MPFR flags and then
11993 calculated M to see if anything subsequently set a flag prior to
11994 entering this function. Return NULL_TREE if any checks fail. */
11996 static tree
11997 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11999 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12000 overflow/underflow occurred. If -frounding-math, proceed iff the
12001 result of calling FUNC was exact. */
12002 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12003 && (!flag_rounding_math || !inexact))
12005 REAL_VALUE_TYPE rr;
12007 real_from_mpfr (&rr, m, type, GMP_RNDN);
12008 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12009 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12010 but the mpfr_t is not, then we underflowed in the
12011 conversion. */
12012 if (real_isfinite (&rr)
12013 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12015 REAL_VALUE_TYPE rmode;
12017 real_convert (&rmode, TYPE_MODE (type), &rr);
12018 /* Proceed iff the specified mode can hold the value. */
12019 if (real_identical (&rmode, &rr))
12020 return build_real (type, rmode);
12023 return NULL_TREE;
12026 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12027 number and no overflow/underflow occurred. INEXACT is true if M
12028 was not exactly calculated. TYPE is the tree type for the result.
12029 This function assumes that you cleared the MPFR flags and then
12030 calculated M to see if anything subsequently set a flag prior to
12031 entering this function. Return NULL_TREE if any checks fail; if
12032 FORCE_CONVERT is true, bypass the checks. */
12034 static tree
12035 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12037 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12038 overflow/underflow occurred. If -frounding-math, proceed iff the
12039 result of calling FUNC was exact. */
12040 if (force_convert
12041 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12042 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12043 && (!flag_rounding_math || !inexact)))
12045 REAL_VALUE_TYPE re, im;
12047 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12048 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12049 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12050 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12051 but the mpfr_t is not, then we underflowed in the
12052 conversion. */
12053 if (force_convert
12054 || (real_isfinite (&re) && real_isfinite (&im)
12055 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12056 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12058 REAL_VALUE_TYPE re_mode, im_mode;
12060 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12061 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12062 /* Proceed iff the specified mode can hold the value. */
12063 if (force_convert
12064 || (real_identical (&re_mode, &re)
12065 && real_identical (&im_mode, &im)))
12066 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12067 build_real (TREE_TYPE (type), im_mode));
12070 return NULL_TREE;
12073 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12074 FUNC on it and return the resulting value as a tree with type TYPE.
12075 If MIN and/or MAX are not NULL, then the supplied ARG must be
12076 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12077 acceptable values, otherwise they are not. The mpfr precision is
12078 set to the precision of TYPE. We assume that function FUNC returns
12079 zero if the result could be calculated exactly within the requested
12080 precision. */
12082 static tree
12083 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12084 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12085 bool inclusive)
12087 tree result = NULL_TREE;
12089 STRIP_NOPS (arg);
12091 /* To proceed, MPFR must exactly represent the target floating point
12092 format, which only happens when the target base equals two. */
12093 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12094 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12096 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12098 if (real_isfinite (ra)
12099 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
12100 && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
12102 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12103 const int prec = fmt->p;
12104 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12105 int inexact;
12106 mpfr_t m;
12108 mpfr_init2 (m, prec);
12109 mpfr_from_real (m, ra, GMP_RNDN);
12110 mpfr_clear_flags ();
12111 inexact = func (m, m, rnd);
12112 result = do_mpfr_ckconv (m, type, inexact);
12113 mpfr_clear (m);
12117 return result;
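/* An illustrative caller (a sketch; the real callers are the
   fold_builtin_* routines): folding sin of a constant would use

     do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false);

   passing no domain bounds, and yields a REAL_CST only when the
   checks in do_mpfr_ckconv accept the MPFR result.  */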
12120 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12121 FUNC on it and return the resulting value as a tree with type TYPE.
12122 The mpfr precision is set to the precision of TYPE. We assume that
12123 function FUNC returns zero if the result could be calculated
12124 exactly within the requested precision. */
12126 static tree
12127 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12128 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12130 tree result = NULL_TREE;
12132 STRIP_NOPS (arg1);
12133 STRIP_NOPS (arg2);
12135 /* To proceed, MPFR must exactly represent the target floating point
12136 format, which only happens when the target base equals two. */
12137 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12138 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12139 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12141 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12142 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12144 if (real_isfinite (ra1) && real_isfinite (ra2))
12146 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12147 const int prec = fmt->p;
12148 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12149 int inexact;
12150 mpfr_t m1, m2;
12152 mpfr_inits2 (prec, m1, m2, NULL);
12153 mpfr_from_real (m1, ra1, GMP_RNDN);
12154 mpfr_from_real (m2, ra2, GMP_RNDN);
12155 mpfr_clear_flags ();
12156 inexact = func (m1, m1, m2, rnd);
12157 result = do_mpfr_ckconv (m1, type, inexact);
12158 mpfr_clears (m1, m2, NULL);
12162 return result;
12165 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12166 FUNC on it and return the resulting value as a tree with type TYPE.
12167 The mpfr precision is set to the precision of TYPE. We assume that
12168 function FUNC returns zero if the result could be calculated
12169 exactly within the requested precision. */
12171 static tree
12172 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12173 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12175 tree result = NULL_TREE;
12177 STRIP_NOPS (arg1);
12178 STRIP_NOPS (arg2);
12179 STRIP_NOPS (arg3);
12181 /* To proceed, MPFR must exactly represent the target floating point
12182 format, which only happens when the target base equals two. */
12183 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12184 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12185 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12186 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12188 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12189 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12190 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12192 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12194 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12195 const int prec = fmt->p;
12196 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12197 int inexact;
12198 mpfr_t m1, m2, m3;
12200 mpfr_inits2 (prec, m1, m2, m3, NULL);
12201 mpfr_from_real (m1, ra1, GMP_RNDN);
12202 mpfr_from_real (m2, ra2, GMP_RNDN);
12203 mpfr_from_real (m3, ra3, GMP_RNDN);
12204 mpfr_clear_flags ();
12205 inexact = func (m1, m1, m2, m3, rnd);
12206 result = do_mpfr_ckconv (m1, type, inexact);
12207 mpfr_clears (m1, m2, m3, NULL);
12211 return result;
12214 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12215 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12216 If ARG_SINP and ARG_COSP are NULL then the result is returned
12217 as a complex value.
12218 The type is taken from the type of ARG and is used for setting the
12219 precision of the calculation and results. */
12221 static tree
12222 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12224 tree const type = TREE_TYPE (arg);
12225 tree result = NULL_TREE;
12227 STRIP_NOPS (arg);
12229 /* To proceed, MPFR must exactly represent the target floating point
12230 format, which only happens when the target base equals two. */
12231 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12232 && TREE_CODE (arg) == REAL_CST
12233 && !TREE_OVERFLOW (arg))
12235 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12237 if (real_isfinite (ra))
12239 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12240 const int prec = fmt->p;
12241 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12242 tree result_s, result_c;
12243 int inexact;
12244 mpfr_t m, ms, mc;
12246 mpfr_inits2 (prec, m, ms, mc, NULL);
12247 mpfr_from_real (m, ra, GMP_RNDN);
12248 mpfr_clear_flags ();
12249 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12250 result_s = do_mpfr_ckconv (ms, type, inexact);
12251 result_c = do_mpfr_ckconv (mc, type, inexact);
12252 mpfr_clears (m, ms, mc, NULL);
12253 if (result_s && result_c)
12255 /* If we are to return the result as a complex value, do so. */
12256 if (!arg_sinp && !arg_cosp)
12257 return build_complex (build_complex_type (type),
12258 result_c, result_s);
12260 /* Dereference the sin/cos pointer arguments. */
12261 arg_sinp = build_fold_indirect_ref (arg_sinp);
12262 arg_cosp = build_fold_indirect_ref (arg_cosp);
12263 /* Proceed iff valid pointer types were passed in. */
12264 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12265 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12267 /* Set the values. */
12268 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12269 result_s);
12270 TREE_SIDE_EFFECTS (result_s) = 1;
12271 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12272 result_c);
12273 TREE_SIDE_EFFECTS (result_c) = 1;
12274 /* Combine the assignments into a compound expr. */
12275 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12276 result_s, result_c));
12281 return result;
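/* For illustration: sincos (0.0, &s, &c) folds to the pair of stores
   *s = 0.0 and *c = 1.0 combined into a COMPOUND_EXPR, while
   cexpi-style callers pass null pointers and get the result back as
   a single COMPLEX_CST (cos in the real part, sin in the imaginary
   part).  */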
12284 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12285 two-argument mpfr order N Bessel function FUNC on them and return
12286 the resulting value as a tree with type TYPE. The mpfr precision
12287 is set to the precision of TYPE. We assume that function FUNC
12288 returns zero if the result could be calculated exactly within the
12289 requested precision. */
12290 static tree
12291 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12292 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12293 const REAL_VALUE_TYPE *min, bool inclusive)
12295 tree result = NULL_TREE;
12297 STRIP_NOPS (arg1);
12298 STRIP_NOPS (arg2);
12300 /* To proceed, MPFR must exactly represent the target floating point
12301 format, which only happens when the target base equals two. */
12302 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12303 && tree_fits_shwi_p (arg1)
12304 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12306 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12307 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12309 if (n == (long)n
12310 && real_isfinite (ra)
12311 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
12313 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12314 const int prec = fmt->p;
12315 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12316 int inexact;
12317 mpfr_t m;
12319 mpfr_init2 (m, prec);
12320 mpfr_from_real (m, ra, GMP_RNDN);
12321 mpfr_clear_flags ();
12322 inexact = func (m, n, m, rnd);
12323 result = do_mpfr_ckconv (m, type, inexact);
12324 mpfr_clear (m);
12328 return result;
12331 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12332 the pointer *(ARG_QUO) and return the result. The type is taken
12333 from the type of ARG0 and is used for setting the precision of the
12334 calculation and results. */
12336 static tree
12337 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12339 tree const type = TREE_TYPE (arg0);
12340 tree result = NULL_TREE;
12342 STRIP_NOPS (arg0);
12343 STRIP_NOPS (arg1);
12345 /* To proceed, MPFR must exactly represent the target floating point
12346 format, which only happens when the target base equals two. */
12347 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12348 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12349 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12351 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12352 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12354 if (real_isfinite (ra0) && real_isfinite (ra1))
12356 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12357 const int prec = fmt->p;
12358 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12359 tree result_rem;
12360 long integer_quo;
12361 mpfr_t m0, m1;
12363 mpfr_inits2 (prec, m0, m1, NULL);
12364 mpfr_from_real (m0, ra0, GMP_RNDN);
12365 mpfr_from_real (m1, ra1, GMP_RNDN);
12366 mpfr_clear_flags ();
12367 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12368 /* Remquo is independent of the rounding mode, so pass
12369 inexact=0 to do_mpfr_ckconv(). */
12370 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12371 mpfr_clears (m0, m1, NULL);
12372 if (result_rem)
12374 /* MPFR calculates quo in the host's long so it may
12375 return more bits in quo than the target int can hold
12376 if sizeof(host long) > sizeof(target int). This can
12377 happen even for native compilers in LP64 mode. In
12378 these cases, reduce the quo value modulo the largest
12379 number that the target int can hold, leaving one
12380 bit for the sign. */
12381 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12382 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12384 /* Dereference the quo pointer argument. */
12385 arg_quo = build_fold_indirect_ref (arg_quo);
12386 /* Proceed iff a valid pointer type was passed in. */
12387 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12389 /* Set the value. */
12390 tree result_quo
12391 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12392 build_int_cst (TREE_TYPE (arg_quo),
12393 integer_quo));
12394 TREE_SIDE_EFFECTS (result_quo) = 1;
12395 /* Combine the quo assignment with the rem. */
12396 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12397 result_quo, result_rem));
12402 return result;
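/* For illustration: remquo (5.0, 3.0, &q) folds to the remainder
   -1.0 (the quotient 5/3 rounds to 2, and 5 - 2*3 == -1), wrapped in
   a COMPOUND_EXPR so the store of 2 into *q still happens.  */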
12405 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12406 resulting value as a tree with type TYPE. The mpfr precision is
12407 set to the precision of TYPE. We assume that this mpfr function
12408 returns zero if the result could be calculated exactly within the
12409 requested precision. In addition, the integer pointer represented
12410 by ARG_SG will be dereferenced and set to the appropriate signgam
12411 (-1,1) value. */
12413 static tree
12414 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12416 tree result = NULL_TREE;
12418 STRIP_NOPS (arg);
12420 /* To proceed, MPFR must exactly represent the target floating point
12421 format, which only happens when the target base equals two. Also
12422 verify ARG is a constant and that ARG_SG is an int pointer. */
12423 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12424 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12425 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12426 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12428 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12430 /* In addition to NaN and Inf, the argument cannot be zero or a
12431 negative integer. */
12432 if (real_isfinite (ra)
12433 && ra->cl != rvc_zero
12434 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12436 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12437 const int prec = fmt->p;
12438 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12439 int inexact, sg;
12440 mpfr_t m;
12441 tree result_lg;
12443 mpfr_init2 (m, prec);
12444 mpfr_from_real (m, ra, GMP_RNDN);
12445 mpfr_clear_flags ();
12446 inexact = mpfr_lgamma (m, &sg, m, rnd);
12447 result_lg = do_mpfr_ckconv (m, type, inexact);
12448 mpfr_clear (m);
12449 if (result_lg)
12451 tree result_sg;
12453 /* Dereference the arg_sg pointer argument. */
12454 arg_sg = build_fold_indirect_ref (arg_sg);
12455 /* Assign the signgam value into *arg_sg. */
12456 result_sg = fold_build2 (MODIFY_EXPR,
12457 TREE_TYPE (arg_sg), arg_sg,
12458 build_int_cst (TREE_TYPE (arg_sg), sg));
12459 TREE_SIDE_EFFECTS (result_sg) = 1;
12460 /* Combine the signgam assignment with the lgamma result. */
12461 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12462 result_sg, result_lg));
12467 return result;
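/* For illustration: lgamma_r (1.0, &sg) folds to the value 0.0 with
   *sg set to 1, since gamma (1.0) == 1 is positive; the fold is
   refused for 0.0 and the negative integers, where lgamma has
   poles.  */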
12470 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12471 function FUNC on it and return the resulting value as a tree with
12472 type TYPE. The mpfr precision is set to the precision of TYPE. We
12473 assume that function FUNC returns zero if the result could be
12474 calculated exactly within the requested precision. */
12476 static tree
12477 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12479 tree result = NULL_TREE;
12481 STRIP_NOPS (arg);
12483 /* To proceed, MPFR must exactly represent the target floating point
12484 format, which only happens when the target base equals two. */
12485 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12486 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12487 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12489 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12490 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12492 if (real_isfinite (re) && real_isfinite (im))
12494 const struct real_format *const fmt =
12495 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12496 const int prec = fmt->p;
12497 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12498 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12499 int inexact;
12500 mpc_t m;
12502 mpc_init2 (m, prec);
12503 mpfr_from_real (mpc_realref (m), re, rnd);
12504 mpfr_from_real (mpc_imagref (m), im, rnd);
12505 mpfr_clear_flags ();
12506 inexact = func (m, m, crnd);
12507 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
12508 mpc_clear (m);
12512 return result;
12515 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
12516 mpc function FUNC on them and return the resulting value as a tree
12517 with type TYPE. The mpfr precision is set to the precision of
12518 TYPE. We assume that function FUNC returns zero if the result
12519 could be calculated exactly within the requested precision. If
12520 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12521 in the arguments and/or results. */
12523 tree
12524 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12525 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12527 tree result = NULL_TREE;
12529 STRIP_NOPS (arg0);
12530 STRIP_NOPS (arg1);
12532 /* To proceed, MPFR must exactly represent the target floating point
12533 format, which only happens when the target base equals two. */
12534 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12535 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12536 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12537 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12538 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12540 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12541 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12542 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12543 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12545 if (do_nonfinite
12546 || (real_isfinite (re0) && real_isfinite (im0)
12547 && real_isfinite (re1) && real_isfinite (im1)))
12549 const struct real_format *const fmt =
12550 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12551 const int prec = fmt->p;
12552 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12553 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12554 int inexact;
12555 mpc_t m0, m1;
12557 mpc_init2 (m0, prec);
12558 mpc_init2 (m1, prec);
12559 mpfr_from_real (mpc_realref (m0), re0, rnd);
12560 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12561 mpfr_from_real (mpc_realref (m1), re1, rnd);
12562 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12563 mpfr_clear_flags ();
12564 inexact = func (m0, m0, m1, crnd);
12565 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12566 mpc_clear (m0);
12567 mpc_clear (m1);
12571 return result;
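/* An illustrative caller (a sketch): folding cpow of two complex
   constants would use

     do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);

   which yields a COMPLEX_CST only when both operands are finite and
   the result passes the checks in do_mpc_ckconv.  */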
12574 /* A wrapper function for builtin folding that prevents warnings for
12575 "statement without effect" and the like, caused by removing the
12576 call node earlier than the warning is generated. */
12578 tree
12579 fold_call_stmt (gimple stmt, bool ignore)
12581 tree ret = NULL_TREE;
12582 tree fndecl = gimple_call_fndecl (stmt);
12583 location_t loc = gimple_location (stmt);
12584 if (fndecl
12585 && TREE_CODE (fndecl) == FUNCTION_DECL
12586 && DECL_BUILT_IN (fndecl)
12587 && !gimple_call_va_arg_pack_p (stmt))
12589 int nargs = gimple_call_num_args (stmt);
12590 tree *args = (nargs > 0
12591 ? gimple_call_arg_ptr (stmt, 0)
12592 : &error_mark_node);
12594 if (avoid_folding_inline_builtin (fndecl))
12595 return NULL_TREE;
12596 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12598 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12600 else
12602 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
12603 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
12604 if (ret)
12606 /* Propagate location information from original call to
12607 expansion of builtin. Otherwise things like
12608 maybe_emit_chk_warning, that operate on the expansion
12609 of a builtin, will use the wrong location information. */
12610 if (gimple_has_location (stmt))
12612 tree realret = ret;
12613 if (TREE_CODE (ret) == NOP_EXPR)
12614 realret = TREE_OPERAND (ret, 0);
12615 if (CAN_HAVE_LOCATION_P (realret)
12616 && !EXPR_HAS_LOCATION (realret))
12617 SET_EXPR_LOCATION (realret, loc);
12618 return realret;
12620 return ret;
12624 return NULL_TREE;
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
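
/* Usage sketch (illustrative): this hook is reached when user code
   renames a builtin with an asm label, e.g.

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   after which both the expansion of __builtin_memcpy and the
   block-move libcall emit references to "my_memcpy" rather than
   "memcpy".  */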
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */

bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
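
/* Example (illustrative): __builtin_constant_p qualifies as "simple"
   because it collapses to an integer constant, so guarding code with
   it costs nothing at run time:

     if (__builtin_constant_p (n) && n < 16)
       ...specialized path for a small constant N...
     else
       ...general path...

   The condition folds to 1 or 0 during compilation and the dead arm
   is discarded.  */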
/* Return true if DECL is a builtin that is not expensive, i.e., one that
   will most probably be expanded inline into reasonably simple code.
   This is a superset of is_simple_builtin.  */

bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
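
/* Usage sketch (illustrative): inlining and size heuristics can treat
   calls to these builtins as nearly free, e.g.

     if (is_inexpensive_builtin (gimple_call_fndecl (stmt)))
       ...account the call as a cheap statement...

   Note that gimple_call_fndecl may return NULL_TREE for indirect
   calls, which this predicate handles by returning false.  Whether a
   particular pass applies exactly this test is an assumption here;
   see the predicate's callers elsewhere in GCC.  */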