/* Expand builtin functions.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "ipa-ref.h"
#include "lto-streamer.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"

static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees, making sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
				  enum built_in_function);

static unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with "__builtin_", "__sync_" or
   "__atomic_", or (with -fcilkplus) names one of the Cilk Plus runtime
   helpers handled below.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
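
/* For example, for an access to P + 3 where P is known to be 8-byte
   aligned, the code above yields *ALIGNP == 64 and *BITPOSP == 24:
   the access lies 3 bytes (24 bits) past a 64-bit aligned address.  */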

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If EXP is not a pointer, only the minimum alignment of
   BITS_PER_UNIT can be returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
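
/* For example, applied to the literal "foo\0bar" with a constant offset
   of zero this returns 3, whereas with a non-constant offset it returns
   NULL_TREE, because the embedded zero byte makes the distance to the
   terminating null depend on where the offset lands.  */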

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
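
/* For example, on a little-endian target c_readstr ("abcd", SImode)
   produces the constant 0x64636261: byte 0 ('a' == 0x61) lands in the
   least significant byte.  Once a NUL has been seen, CH stays zero, so
   any remaining bytes are padded with zeros.  */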

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
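
/* Consequently __builtin_return_address (0) loads the return address of
   the current frame, while __builtin_frame_address (N) with N > 0 walks
   the dynamic chain N times before returning the (possibly biased)
   frame address.  */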

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
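
/* The buffer built above therefore has the layout:
     word 0          the frame pointer value (targetm.builtin_setjmp_frame_value)
     word 1          the address of RECEIVER_LABEL
     words 2 and up  the machine-dependent stack save area (SA_MODE)
   expand_builtin_longjmp below reads these words back in the same order.  */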

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
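
/* For instance,
     validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
   accepts exactly a (pointer, integer) argument list, whereas ending
   the specifier list with 0 instead of VOID_TYPE would also allow any
   number of trailing arguments of any type.  */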

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
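
/* Thus a call such as __builtin_prefetch (p, 0, 3) becomes a read
   prefetch with maximal temporal locality on targets providing a
   "prefetch" pattern, and degrades to merely evaluating P for side
   effects (or to nothing at all) everywhere else.  */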

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
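
/* The block laid out above thus contains, in order: one pointer-sized
   slot for the incoming arg pointer, an optional pointer-sized slot for
   the structure value address, and then one suitably aligned slot per
   argument-passing hard register.  */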

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1582 /* Perform an untyped call and save the state required to perform an
1583 untyped return of whatever value was returned by the given function. */
1585 static rtx
1586 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1588 int size, align, regno;
1589 machine_mode mode;
1590 rtx incoming_args, result, reg, dest, src;
1591 rtx_call_insn *call_insn;
1592 rtx old_stack_level = 0;
1593 rtx call_fusage = 0;
1594 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1596 arguments = convert_memory_address (Pmode, arguments);
1598 /* Create a block where the return registers can be saved. */
1599 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1601 /* Fetch the arg pointer from the ARGUMENTS block. */
1602 incoming_args = gen_reg_rtx (Pmode);
1603 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1604 #ifndef STACK_GROWS_DOWNWARD
1605 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1606 incoming_args, 0, OPTAB_LIB_WIDEN);
1607 #endif
1609 /* Push a new argument block and copy the arguments. Do not allow
1610 the (potential) memcpy call below to interfere with our stack
1611 manipulations. */
1612 do_pending_stack_adjust ();
1613 NO_DEFER_POP;
1615 /* Save the stack with nonlocal if available. */
1616 #ifdef HAVE_save_stack_nonlocal
1617 if (HAVE_save_stack_nonlocal)
1618 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1619 else
1620 #endif
1621 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1623 /* Allocate a block of memory onto the stack and copy the memory
1624 arguments to the outgoing arguments address. We can pass TRUE
1625 as the 4th argument because we just saved the stack pointer
1626 and will restore it right after the call. */
1627 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1629 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1630 may have already set current_function_calls_alloca to true.
1631 current_function_calls_alloca won't be set if argsize is zero,
1632 so we have to guarantee need_drap is true here. */
1633 if (SUPPORTS_STACK_ALIGNMENT)
1634 crtl->need_drap = true;
1636 dest = virtual_outgoing_args_rtx;
1637 #ifndef STACK_GROWS_DOWNWARD
1638 if (CONST_INT_P (argsize))
1639 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1640 else
1641 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1642 #endif
1643 dest = gen_rtx_MEM (BLKmode, dest);
1644 set_mem_align (dest, PARM_BOUNDARY);
1645 src = gen_rtx_MEM (BLKmode, incoming_args);
1646 set_mem_align (src, PARM_BOUNDARY);
1647 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1649 /* Refer to the argument block. */
1650 apply_args_size ();
1651 arguments = gen_rtx_MEM (BLKmode, arguments);
1652 set_mem_align (arguments, PARM_BOUNDARY);
1654 /* Walk past the arg-pointer and structure value address. */
1655 size = GET_MODE_SIZE (Pmode);
1656 if (struct_value)
1657 size += GET_MODE_SIZE (Pmode);
1659 /* Restore each of the registers previously saved. Make USE insns
1660 for each of these registers for use in making the call. */
1661 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1662 if ((mode = apply_args_mode[regno]) != VOIDmode)
1664 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1665 if (size % align != 0)
1666 size = CEIL (size, align) * align;
1667 reg = gen_rtx_REG (mode, regno);
1668 emit_move_insn (reg, adjust_address (arguments, mode, size));
1669 use_reg (&call_fusage, reg);
1670 size += GET_MODE_SIZE (mode);
1673 /* Restore the structure value address unless this is passed as an
1674 "invisible" first argument. */
1675 size = GET_MODE_SIZE (Pmode);
1676 if (struct_value)
1678 rtx value = gen_reg_rtx (Pmode);
1679 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1680 emit_move_insn (struct_value, value);
1681 if (REG_P (struct_value))
1682 use_reg (&call_fusage, struct_value);
1683 size += GET_MODE_SIZE (Pmode);
1686 /* All arguments and registers used for the call are set up by now! */
1687 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1689 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1690 needs to be done, and we don't want to load it into a register as an
1691 optimization, because prepare_call_address already did that if needed. */
1692 if (GET_CODE (function) != SYMBOL_REF)
1693 function = memory_address (FUNCTION_MODE, function);
1695 /* Generate the actual call instruction and save the return value. */
1696 #ifdef HAVE_untyped_call
1697 if (HAVE_untyped_call)
1698 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1699 result, result_vector (1, result)));
1700 else
1701 #endif
1702 #ifdef HAVE_call_value
1703 if (HAVE_call_value)
1705 rtx valreg = 0;
1707 /* Locate the unique return register. It is not possible to
1708 express a call that sets more than one return register using
1709 call_value; use untyped_call for that. In fact, untyped_call
1710 only needs to save the return registers in the given block. */
1711 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1712 if ((mode = apply_result_mode[regno]) != VOIDmode)
1714 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1716 valreg = gen_rtx_REG (mode, regno);
1719 emit_call_insn (GEN_CALL_VALUE (valreg,
1720 gen_rtx_MEM (FUNCTION_MODE, function),
1721 const0_rtx, NULL_RTX, const0_rtx));
1723 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1725 else
1726 #endif
1727 gcc_unreachable ();
1729 /* Find the CALL insn we just emitted, and attach the register usage
1730 information. */
1731 call_insn = last_call_insn ();
1732 add_function_usage_to (call_insn, call_fusage);
1734 /* Restore the stack. */
1735 #ifdef HAVE_save_stack_nonlocal
1736 if (HAVE_save_stack_nonlocal)
1737 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1738 else
1739 #endif
1740 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1741 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1743 OK_DEFER_POP;
1745 /* Return the address of the result block. */
1746 result = copy_addr_to_reg (XEXP (result, 0));
1747 return convert_memory_address (ptr_mode, result);
1750 /* Perform an untyped return. */
1752 static void
1753 expand_builtin_return (rtx result)
1755 int size, align, regno;
1756 machine_mode mode;
1757 rtx reg;
1758 rtx_insn *call_fusage = 0;
1760 result = convert_memory_address (Pmode, result);
1762 apply_result_size ();
1763 result = gen_rtx_MEM (BLKmode, result);
1765 #ifdef HAVE_untyped_return
1766 if (HAVE_untyped_return)
1768 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1769 emit_barrier ();
1770 return;
1772 #endif
1774 /* Restore the return value and note that each value is used. */
1775 size = 0;
1776 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1777 if ((mode = apply_result_mode[regno]) != VOIDmode)
1779 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1780 if (size % align != 0)
1781 size = CEIL (size, align) * align;
1782 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1783 emit_move_insn (reg, adjust_address (result, mode, size));
1785 push_to_sequence (call_fusage);
1786 emit_use (reg);
1787 call_fusage = get_insns ();
1788 end_sequence ();
1789 size += GET_MODE_SIZE (mode);
1792 /* Put the USE insns before the return. */
1793 emit_insn (call_fusage);
1795 /* Return whatever value was restored by jumping directly to the end
1796 of the function. */
1797 expand_naked_return ();
1800 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1802 static enum type_class
1803 type_to_class (tree type)
1805 switch (TREE_CODE (type))
1807 case VOID_TYPE: return void_type_class;
1808 case INTEGER_TYPE: return integer_type_class;
1809 case ENUMERAL_TYPE: return enumeral_type_class;
1810 case BOOLEAN_TYPE: return boolean_type_class;
1811 case POINTER_TYPE: return pointer_type_class;
1812 case REFERENCE_TYPE: return reference_type_class;
1813 case OFFSET_TYPE: return offset_type_class;
1814 case REAL_TYPE: return real_type_class;
1815 case COMPLEX_TYPE: return complex_type_class;
1816 case FUNCTION_TYPE: return function_type_class;
1817 case METHOD_TYPE: return method_type_class;
1818 case RECORD_TYPE: return record_type_class;
1819 case UNION_TYPE:
1820 case QUAL_UNION_TYPE: return union_type_class;
1821 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1822 ? string_type_class : array_type_class);
1823 case LANG_TYPE: return lang_type_class;
1824 default: return no_type_class;
1828 /* Expand a call EXP to __builtin_classify_type. */
1830 static rtx
1831 expand_builtin_classify_type (tree exp)
1833 if (call_expr_nargs (exp))
1834 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1835 return GEN_INT (no_type_class);
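/* Illustrative use from user code (a sketch; the integer compared
   against is the position of pointer_type_class in the type_class
   enumeration of typeclass.h, assumed here to be 5):

     int is_pointer = __builtin_classify_type (p) == 5;

   glibc's <tgmath.h> dispatches on these classes in a similar way. */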
1838 /* This helper macro, meant to be used in mathfn_built_in below,
1839 determines which among a set of three builtin math functions is
1840 appropriate for a given type mode. The `F' and `L' cases are
1841 automatically generated from the `double' case. */
1842 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1843 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1844 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1845 fcodel = BUILT_IN_MATHFN##L ; break;
1846 /* Similar to above, but appends _R after any F/L suffix. */
1847 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1848 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1849 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1850 fcodel = BUILT_IN_MATHFN##L_R ; break;
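/* For example, CASE_MATHFN (BUILT_IN_SIN) expands to:

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;
*/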
1852 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1853 if available. If IMPLICIT is true use the implicit builtin declaration,
1854 otherwise use the explicit declaration. If we can't do the conversion,
1855 return zero. */
1857 static tree
1858 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1860 enum built_in_function fcode, fcodef, fcodel, fcode2;
1862 switch (fn)
1864 CASE_MATHFN (BUILT_IN_ACOS)
1865 CASE_MATHFN (BUILT_IN_ACOSH)
1866 CASE_MATHFN (BUILT_IN_ASIN)
1867 CASE_MATHFN (BUILT_IN_ASINH)
1868 CASE_MATHFN (BUILT_IN_ATAN)
1869 CASE_MATHFN (BUILT_IN_ATAN2)
1870 CASE_MATHFN (BUILT_IN_ATANH)
1871 CASE_MATHFN (BUILT_IN_CBRT)
1872 CASE_MATHFN (BUILT_IN_CEIL)
1873 CASE_MATHFN (BUILT_IN_CEXPI)
1874 CASE_MATHFN (BUILT_IN_COPYSIGN)
1875 CASE_MATHFN (BUILT_IN_COS)
1876 CASE_MATHFN (BUILT_IN_COSH)
1877 CASE_MATHFN (BUILT_IN_DREM)
1878 CASE_MATHFN (BUILT_IN_ERF)
1879 CASE_MATHFN (BUILT_IN_ERFC)
1880 CASE_MATHFN (BUILT_IN_EXP)
1881 CASE_MATHFN (BUILT_IN_EXP10)
1882 CASE_MATHFN (BUILT_IN_EXP2)
1883 CASE_MATHFN (BUILT_IN_EXPM1)
1884 CASE_MATHFN (BUILT_IN_FABS)
1885 CASE_MATHFN (BUILT_IN_FDIM)
1886 CASE_MATHFN (BUILT_IN_FLOOR)
1887 CASE_MATHFN (BUILT_IN_FMA)
1888 CASE_MATHFN (BUILT_IN_FMAX)
1889 CASE_MATHFN (BUILT_IN_FMIN)
1890 CASE_MATHFN (BUILT_IN_FMOD)
1891 CASE_MATHFN (BUILT_IN_FREXP)
1892 CASE_MATHFN (BUILT_IN_GAMMA)
1893 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1894 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1895 CASE_MATHFN (BUILT_IN_HYPOT)
1896 CASE_MATHFN (BUILT_IN_ILOGB)
1897 CASE_MATHFN (BUILT_IN_ICEIL)
1898 CASE_MATHFN (BUILT_IN_IFLOOR)
1899 CASE_MATHFN (BUILT_IN_INF)
1900 CASE_MATHFN (BUILT_IN_IRINT)
1901 CASE_MATHFN (BUILT_IN_IROUND)
1902 CASE_MATHFN (BUILT_IN_ISINF)
1903 CASE_MATHFN (BUILT_IN_J0)
1904 CASE_MATHFN (BUILT_IN_J1)
1905 CASE_MATHFN (BUILT_IN_JN)
1906 CASE_MATHFN (BUILT_IN_LCEIL)
1907 CASE_MATHFN (BUILT_IN_LDEXP)
1908 CASE_MATHFN (BUILT_IN_LFLOOR)
1909 CASE_MATHFN (BUILT_IN_LGAMMA)
1910 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1911 CASE_MATHFN (BUILT_IN_LLCEIL)
1912 CASE_MATHFN (BUILT_IN_LLFLOOR)
1913 CASE_MATHFN (BUILT_IN_LLRINT)
1914 CASE_MATHFN (BUILT_IN_LLROUND)
1915 CASE_MATHFN (BUILT_IN_LOG)
1916 CASE_MATHFN (BUILT_IN_LOG10)
1917 CASE_MATHFN (BUILT_IN_LOG1P)
1918 CASE_MATHFN (BUILT_IN_LOG2)
1919 CASE_MATHFN (BUILT_IN_LOGB)
1920 CASE_MATHFN (BUILT_IN_LRINT)
1921 CASE_MATHFN (BUILT_IN_LROUND)
1922 CASE_MATHFN (BUILT_IN_MODF)
1923 CASE_MATHFN (BUILT_IN_NAN)
1924 CASE_MATHFN (BUILT_IN_NANS)
1925 CASE_MATHFN (BUILT_IN_NEARBYINT)
1926 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1927 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1928 CASE_MATHFN (BUILT_IN_POW)
1929 CASE_MATHFN (BUILT_IN_POWI)
1930 CASE_MATHFN (BUILT_IN_POW10)
1931 CASE_MATHFN (BUILT_IN_REMAINDER)
1932 CASE_MATHFN (BUILT_IN_REMQUO)
1933 CASE_MATHFN (BUILT_IN_RINT)
1934 CASE_MATHFN (BUILT_IN_ROUND)
1935 CASE_MATHFN (BUILT_IN_SCALB)
1936 CASE_MATHFN (BUILT_IN_SCALBLN)
1937 CASE_MATHFN (BUILT_IN_SCALBN)
1938 CASE_MATHFN (BUILT_IN_SIGNBIT)
1939 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1940 CASE_MATHFN (BUILT_IN_SIN)
1941 CASE_MATHFN (BUILT_IN_SINCOS)
1942 CASE_MATHFN (BUILT_IN_SINH)
1943 CASE_MATHFN (BUILT_IN_SQRT)
1944 CASE_MATHFN (BUILT_IN_TAN)
1945 CASE_MATHFN (BUILT_IN_TANH)
1946 CASE_MATHFN (BUILT_IN_TGAMMA)
1947 CASE_MATHFN (BUILT_IN_TRUNC)
1948 CASE_MATHFN (BUILT_IN_Y0)
1949 CASE_MATHFN (BUILT_IN_Y1)
1950 CASE_MATHFN (BUILT_IN_YN)
1952 default:
1953 return NULL_TREE;
1956 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1957 fcode2 = fcode;
1958 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1959 fcode2 = fcodef;
1960 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1961 fcode2 = fcodel;
1962 else
1963 return NULL_TREE;
1965 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1966 return NULL_TREE;
1968 return builtin_decl_explicit (fcode2);
1971 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1973 tree
1974 mathfn_built_in (tree type, enum built_in_function fn)
1976 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
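/* For example, mathfn_built_in (float_type_node, BUILT_IN_SQRT)
   yields the implicit declaration of sqrtf, or NULL_TREE when that
   declaration is unavailable, so callers can write:

     tree fn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_SQRT);
     if (fn)
       ... build a call to FN on ARG ...
*/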
1979 /* If errno must be maintained, expand the RTL to check if the result,
1980 TARGET, of a built-in function call, EXP, is NaN, and if so set
1981 errno to EDOM. */
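/* In C terms the emitted RTL behaves roughly like this sketch:

     result = fn (arg);
     if (!(result == result))    (NaN is the only value unequal to itself)
       errno = EDOM;

   which is why the compare-and-jump below tests TARGET against itself
   with EQ and skips the errno store when the two compare equal. */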
1983 static void
1984 expand_errno_check (tree exp, rtx target)
1986 rtx_code_label *lab = gen_label_rtx ();
1988 /* Test the result; if it is NaN, set errno=EDOM because
1989 the argument was not in the domain. */
1990 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1991 NULL_RTX, NULL_RTX, lab,
1992 /* The jump is very likely. */
1993 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1995 #ifdef TARGET_EDOM
1996 /* If this built-in doesn't throw an exception, set errno directly. */
1997 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1999 #ifdef GEN_ERRNO_RTX
2000 rtx errno_rtx = GEN_ERRNO_RTX;
2001 #else
2002 rtx errno_rtx
2003 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2004 #endif
2005 emit_move_insn (errno_rtx,
2006 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2007 emit_label (lab);
2008 return;
2010 #endif
2012 /* Make sure the library call isn't expanded as a tail call. */
2013 CALL_EXPR_TAILCALL (exp) = 0;
2015 /* We can't set errno=EDOM directly; let the library call do it.
2016 Pop the arguments right away in case the call gets deleted. */
2017 NO_DEFER_POP;
2018 expand_call (exp, target, 0);
2019 OK_DEFER_POP;
2020 emit_label (lab);
2023 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2024 Return NULL_RTX if a normal call should be emitted rather than expanding
2025 the function in-line. EXP is the expression that is a call to the builtin
2026 function; if convenient, the result should be placed in TARGET.
2027 SUBTARGET may be used as the target for computing one of EXP's operands. */
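/* For instance, with -fno-math-errno a call such as

     double f (double x) { return __builtin_sqrt (x); }

   can expand through sqrt_optab to a single machine instruction where
   the target provides one (sqrtsd on x86-64, for example); when errno
   handling is live, the expand_errno_check sequence above is emitted
   as well. This sketch describes intent, not a per-target guarantee. */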
2029 static rtx
2030 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2032 optab builtin_optab;
2033 rtx op0;
2034 rtx_insn *insns;
2035 tree fndecl = get_callee_fndecl (exp);
2036 machine_mode mode;
2037 bool errno_set = false;
2038 bool try_widening = false;
2039 tree arg;
2041 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2042 return NULL_RTX;
2044 arg = CALL_EXPR_ARG (exp, 0);
2046 switch (DECL_FUNCTION_CODE (fndecl))
2048 CASE_FLT_FN (BUILT_IN_SQRT):
2049 errno_set = ! tree_expr_nonnegative_p (arg);
2050 try_widening = true;
2051 builtin_optab = sqrt_optab;
2052 break;
2053 CASE_FLT_FN (BUILT_IN_EXP):
2054 errno_set = true; builtin_optab = exp_optab; break;
2055 CASE_FLT_FN (BUILT_IN_EXP10):
2056 CASE_FLT_FN (BUILT_IN_POW10):
2057 errno_set = true; builtin_optab = exp10_optab; break;
2058 CASE_FLT_FN (BUILT_IN_EXP2):
2059 errno_set = true; builtin_optab = exp2_optab; break;
2060 CASE_FLT_FN (BUILT_IN_EXPM1):
2061 errno_set = true; builtin_optab = expm1_optab; break;
2062 CASE_FLT_FN (BUILT_IN_LOGB):
2063 errno_set = true; builtin_optab = logb_optab; break;
2064 CASE_FLT_FN (BUILT_IN_LOG):
2065 errno_set = true; builtin_optab = log_optab; break;
2066 CASE_FLT_FN (BUILT_IN_LOG10):
2067 errno_set = true; builtin_optab = log10_optab; break;
2068 CASE_FLT_FN (BUILT_IN_LOG2):
2069 errno_set = true; builtin_optab = log2_optab; break;
2070 CASE_FLT_FN (BUILT_IN_LOG1P):
2071 errno_set = true; builtin_optab = log1p_optab; break;
2072 CASE_FLT_FN (BUILT_IN_ASIN):
2073 builtin_optab = asin_optab; break;
2074 CASE_FLT_FN (BUILT_IN_ACOS):
2075 builtin_optab = acos_optab; break;
2076 CASE_FLT_FN (BUILT_IN_TAN):
2077 builtin_optab = tan_optab; break;
2078 CASE_FLT_FN (BUILT_IN_ATAN):
2079 builtin_optab = atan_optab; break;
2080 CASE_FLT_FN (BUILT_IN_FLOOR):
2081 builtin_optab = floor_optab; break;
2082 CASE_FLT_FN (BUILT_IN_CEIL):
2083 builtin_optab = ceil_optab; break;
2084 CASE_FLT_FN (BUILT_IN_TRUNC):
2085 builtin_optab = btrunc_optab; break;
2086 CASE_FLT_FN (BUILT_IN_ROUND):
2087 builtin_optab = round_optab; break;
2088 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2089 builtin_optab = nearbyint_optab;
2090 if (flag_trapping_math)
2091 break;
2092 /* Else fall through and expand as rint. */
2093 CASE_FLT_FN (BUILT_IN_RINT):
2094 builtin_optab = rint_optab; break;
2095 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2096 builtin_optab = significand_optab; break;
2097 default:
2098 gcc_unreachable ();
2101 /* Make a suitable register to place result in. */
2102 mode = TYPE_MODE (TREE_TYPE (exp));
2104 if (! flag_errno_math || ! HONOR_NANS (mode))
2105 errno_set = false;
2107 /* Before working hard, check whether the instruction is available, but try
2108 to widen the mode for specific operations. */
2109 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2110 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2111 && (!errno_set || !optimize_insn_for_size_p ()))
2113 rtx result = gen_reg_rtx (mode);
2115 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2116 need to expand the argument again. This way, we will not perform
2117 side-effects more than once. */
2118 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2120 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2122 start_sequence ();
2124 /* Compute into RESULT.
2125 Set RESULT to wherever the result comes back. */
2126 result = expand_unop (mode, builtin_optab, op0, result, 0);
2128 if (result != 0)
2130 if (errno_set)
2131 expand_errno_check (exp, result);
2133 /* Output the entire sequence. */
2134 insns = get_insns ();
2135 end_sequence ();
2136 emit_insn (insns);
2137 return result;
2140 /* If we were unable to expand via the builtin, stop the sequence
2141 (without outputting the insns) and call the library function
2142 with the stabilized argument list. */
2143 end_sequence ();
2146 return expand_call (exp, target, target == const0_rtx);
2149 /* Expand a call to the builtin binary math functions (pow and atan2).
2150 Return NULL_RTX if a normal call should be emitted rather than expanding the
2151 function in-line. EXP is the expression that is a call to the builtin
2152 function; if convenient, the result should be placed in TARGET.
2153 SUBTARGET may be used as the target for computing one of EXP's
2154 operands. */
2156 static rtx
2157 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2159 optab builtin_optab;
2160 rtx op0, op1, result;
2161 rtx_insn *insns;
2162 int op1_type = REAL_TYPE;
2163 tree fndecl = get_callee_fndecl (exp);
2164 tree arg0, arg1;
2165 machine_mode mode;
2166 bool errno_set = true;
2168 switch (DECL_FUNCTION_CODE (fndecl))
2170 CASE_FLT_FN (BUILT_IN_SCALBN):
2171 CASE_FLT_FN (BUILT_IN_SCALBLN):
2172 CASE_FLT_FN (BUILT_IN_LDEXP):
2173 op1_type = INTEGER_TYPE;
2174 default:
2175 break;
2178 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2179 return NULL_RTX;
2181 arg0 = CALL_EXPR_ARG (exp, 0);
2182 arg1 = CALL_EXPR_ARG (exp, 1);
2184 switch (DECL_FUNCTION_CODE (fndecl))
2186 CASE_FLT_FN (BUILT_IN_POW):
2187 builtin_optab = pow_optab; break;
2188 CASE_FLT_FN (BUILT_IN_ATAN2):
2189 builtin_optab = atan2_optab; break;
2190 CASE_FLT_FN (BUILT_IN_SCALB):
2191 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2192 return 0;
2193 builtin_optab = scalb_optab; break;
2194 CASE_FLT_FN (BUILT_IN_SCALBN):
2195 CASE_FLT_FN (BUILT_IN_SCALBLN):
2196 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2197 return 0;
2198 /* Fall through... */
2199 CASE_FLT_FN (BUILT_IN_LDEXP):
2200 builtin_optab = ldexp_optab; break;
2201 CASE_FLT_FN (BUILT_IN_FMOD):
2202 builtin_optab = fmod_optab; break;
2203 CASE_FLT_FN (BUILT_IN_REMAINDER):
2204 CASE_FLT_FN (BUILT_IN_DREM):
2205 builtin_optab = remainder_optab; break;
2206 default:
2207 gcc_unreachable ();
2210 /* Make a suitable register to place result in. */
2211 mode = TYPE_MODE (TREE_TYPE (exp));
2213 /* Before working hard, check whether the instruction is available. */
2214 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2215 return NULL_RTX;
2217 result = gen_reg_rtx (mode);
2219 if (! flag_errno_math || ! HONOR_NANS (mode))
2220 errno_set = false;
2222 if (errno_set && optimize_insn_for_size_p ())
2223 return 0;
2225 /* Always stabilize the argument list. */
2226 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2227 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2229 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2230 op1 = expand_normal (arg1);
2232 start_sequence ();
2234 /* Compute into RESULT.
2235 Set RESULT to wherever the result comes back. */
2236 result = expand_binop (mode, builtin_optab, op0, op1,
2237 result, 0, OPTAB_DIRECT);
2239 /* If we were unable to expand via the builtin, stop the sequence
2240 (without outputting the insns) and call the library function
2241 with the stabilized argument list. */
2242 if (result == 0)
2244 end_sequence ();
2245 return expand_call (exp, target, target == const0_rtx);
2248 if (errno_set)
2249 expand_errno_check (exp, result);
2251 /* Output the entire sequence. */
2252 insns = get_insns ();
2253 end_sequence ();
2254 emit_insn (insns);
2256 return result;
2259 /* Expand a call to the builtin trinary math functions (fma).
2260 Return NULL_RTX if a normal call should be emitted rather than expanding the
2261 function in-line. EXP is the expression that is a call to the builtin
2262 function; if convenient, the result should be placed in TARGET.
2263 SUBTARGET may be used as the target for computing one of EXP's
2264 operands. */
2266 static rtx
2267 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2269 optab builtin_optab;
2270 rtx op0, op1, op2, result;
2271 rtx_insn *insns;
2272 tree fndecl = get_callee_fndecl (exp);
2273 tree arg0, arg1, arg2;
2274 machine_mode mode;
2276 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2277 return NULL_RTX;
2279 arg0 = CALL_EXPR_ARG (exp, 0);
2280 arg1 = CALL_EXPR_ARG (exp, 1);
2281 arg2 = CALL_EXPR_ARG (exp, 2);
2283 switch (DECL_FUNCTION_CODE (fndecl))
2285 CASE_FLT_FN (BUILT_IN_FMA):
2286 builtin_optab = fma_optab; break;
2287 default:
2288 gcc_unreachable ();
2291 /* Make a suitable register to place result in. */
2292 mode = TYPE_MODE (TREE_TYPE (exp));
2294 /* Before working hard, check whether the instruction is available. */
2295 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2296 return NULL_RTX;
2298 result = gen_reg_rtx (mode);
2300 /* Always stabilize the argument list. */
2301 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2302 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2303 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2305 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2306 op1 = expand_normal (arg1);
2307 op2 = expand_normal (arg2);
2309 start_sequence ();
2311 /* Compute into RESULT.
2312 Set RESULT to wherever the result comes back. */
2313 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2314 result, 0);
2316 /* If we were unable to expand via the builtin, stop the sequence
2317 (without outputting the insns) and call the library function
2318 with the stabilized argument list. */
2319 if (result == 0)
2321 end_sequence ();
2322 return expand_call (exp, target, target == const0_rtx);
2325 /* Output the entire sequence. */
2326 insns = get_insns ();
2327 end_sequence ();
2328 emit_insn (insns);
2330 return result;
2333 /* Expand a call to the builtin sin and cos math functions.
2334 Return NULL_RTX if a normal call should be emitted rather than expanding the
2335 function in-line. EXP is the expression that is a call to the builtin
2336 function; if convenient, the result should be placed in TARGET.
2337 SUBTARGET may be used as the target for computing one of EXP's
2338 operands. */
2340 static rtx
2341 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2343 optab builtin_optab;
2344 rtx op0;
2345 rtx_insn *insns;
2346 tree fndecl = get_callee_fndecl (exp);
2347 machine_mode mode;
2348 tree arg;
2350 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2351 return NULL_RTX;
2353 arg = CALL_EXPR_ARG (exp, 0);
2355 switch (DECL_FUNCTION_CODE (fndecl))
2357 CASE_FLT_FN (BUILT_IN_SIN):
2358 CASE_FLT_FN (BUILT_IN_COS):
2359 builtin_optab = sincos_optab; break;
2360 default:
2361 gcc_unreachable ();
2364 /* Make a suitable register to place result in. */
2365 mode = TYPE_MODE (TREE_TYPE (exp));
2367 /* Check if the sincos insn is available; otherwise fall back
2368 to the sin or cos insn. */
2369 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2370 switch (DECL_FUNCTION_CODE (fndecl))
2372 CASE_FLT_FN (BUILT_IN_SIN):
2373 builtin_optab = sin_optab; break;
2374 CASE_FLT_FN (BUILT_IN_COS):
2375 builtin_optab = cos_optab; break;
2376 default:
2377 gcc_unreachable ();
2380 /* Before working hard, check whether the instruction is available. */
2381 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2383 rtx result = gen_reg_rtx (mode);
2385 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2386 need to expand the argument again. This way, we will not perform
2387 side-effects more than once. */
2388 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2390 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2392 start_sequence ();
2394 /* Compute into RESULT.
2395 Set RESULT to wherever the result comes back. */
2396 if (builtin_optab == sincos_optab)
2398 int ok;
2400 switch (DECL_FUNCTION_CODE (fndecl))
2402 CASE_FLT_FN (BUILT_IN_SIN):
2403 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2404 break;
2405 CASE_FLT_FN (BUILT_IN_COS):
2406 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2407 break;
2408 default:
2409 gcc_unreachable ();
2411 gcc_assert (ok);
2413 else
2414 result = expand_unop (mode, builtin_optab, op0, result, 0);
2416 if (result != 0)
2418 /* Output the entire sequence. */
2419 insns = get_insns ();
2420 end_sequence ();
2421 emit_insn (insns);
2422 return result;
2425 /* If we were unable to expand via the builtin, stop the sequence
2426 (without outputting the insns) and call the library function
2427 with the stabilized argument list. */
2428 end_sequence ();
2431 return expand_call (exp, target, target == const0_rtx);
2434 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2435 return an RTL instruction code that implements the functionality.
2436 If that isn't possible or available, return CODE_FOR_nothing. */
2438 static enum insn_code
2439 interclass_mathfn_icode (tree arg, tree fndecl)
2441 bool errno_set = false;
2442 optab builtin_optab = unknown_optab;
2443 machine_mode mode;
2445 switch (DECL_FUNCTION_CODE (fndecl))
2447 CASE_FLT_FN (BUILT_IN_ILOGB):
2448 errno_set = true; builtin_optab = ilogb_optab; break;
2449 CASE_FLT_FN (BUILT_IN_ISINF):
2450 builtin_optab = isinf_optab; break;
2451 case BUILT_IN_ISNORMAL:
2452 case BUILT_IN_ISFINITE:
2453 CASE_FLT_FN (BUILT_IN_FINITE):
2454 case BUILT_IN_FINITED32:
2455 case BUILT_IN_FINITED64:
2456 case BUILT_IN_FINITED128:
2457 case BUILT_IN_ISINFD32:
2458 case BUILT_IN_ISINFD64:
2459 case BUILT_IN_ISINFD128:
2460 /* These builtins have no optabs (yet). */
2461 break;
2462 default:
2463 gcc_unreachable ();
2466 /* There's no easy way to detect the case we need to set EDOM. */
2467 if (flag_errno_math && errno_set)
2468 return CODE_FOR_nothing;
2470 /* Optab mode depends on the mode of the input argument. */
2471 mode = TYPE_MODE (TREE_TYPE (arg));
2473 if (builtin_optab)
2474 return optab_handler (builtin_optab, mode);
2475 return CODE_FOR_nothing;
2478 /* Expand a call to one of the builtin math functions that operate on
2479 a floating point argument and output an integer result (ilogb, isinf,
2480 isnan, etc).
2481 Return 0 if a normal call should be emitted rather than expanding the
2482 function in-line. EXP is the expression that is a call to the builtin
2483 function; if convenient, the result should be placed in TARGET. */
2485 static rtx
2486 expand_builtin_interclass_mathfn (tree exp, rtx target)
2488 enum insn_code icode = CODE_FOR_nothing;
2489 rtx op0;
2490 tree fndecl = get_callee_fndecl (exp);
2491 machine_mode mode;
2492 tree arg;
2494 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2495 return NULL_RTX;
2497 arg = CALL_EXPR_ARG (exp, 0);
2498 icode = interclass_mathfn_icode (arg, fndecl);
2499 mode = TYPE_MODE (TREE_TYPE (arg));
2501 if (icode != CODE_FOR_nothing)
2503 struct expand_operand ops[1];
2504 rtx_insn *last = get_last_insn ();
2505 tree orig_arg = arg;
2507 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2508 need to expand the argument again. This way, we will not perform
2509 side-effects more than once. */
2510 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2512 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2514 if (mode != GET_MODE (op0))
2515 op0 = convert_to_mode (mode, op0, 0);
2517 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2518 if (maybe_legitimize_operands (icode, 0, 1, ops)
2519 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2520 return ops[0].value;
2522 delete_insns_since (last);
2523 CALL_EXPR_ARG (exp, 0) = orig_arg;
2526 return NULL_RTX;
2529 /* Expand a call to the builtin sincos math function.
2530 Return NULL_RTX if a normal call should be emitted rather than expanding the
2531 function in-line. EXP is the expression that is a call to the builtin
2532 function. */
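/* The source-level pattern served here is the GNU extension

     double s, c;
     sincos (x, &s, &c);

   which computes both results in one call; when sincos_optab has a
   handler for the mode, a single instruction pattern produces both. */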
2534 static rtx
2535 expand_builtin_sincos (tree exp)
2537 rtx op0, op1, op2, target1, target2;
2538 machine_mode mode;
2539 tree arg, sinp, cosp;
2540 int result;
2541 location_t loc = EXPR_LOCATION (exp);
2542 tree alias_type, alias_off;
2544 if (!validate_arglist (exp, REAL_TYPE,
2545 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2546 return NULL_RTX;
2548 arg = CALL_EXPR_ARG (exp, 0);
2549 sinp = CALL_EXPR_ARG (exp, 1);
2550 cosp = CALL_EXPR_ARG (exp, 2);
2552 /* Make a suitable register to place result in. */
2553 mode = TYPE_MODE (TREE_TYPE (arg));
2555 /* Check if sincos insn is available, otherwise emit the call. */
2556 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2557 return NULL_RTX;
2559 target1 = gen_reg_rtx (mode);
2560 target2 = gen_reg_rtx (mode);
2562 op0 = expand_normal (arg);
2563 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2564 alias_off = build_int_cst (alias_type, 0);
2565 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2566 sinp, alias_off));
2567 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2568 cosp, alias_off));
2570 /* Compute into target1 and target2.
2571 Set TARGET to wherever the result comes back. */
2572 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2573 gcc_assert (result);
2575 /* Move target1 and target2 to the memory locations indicated
2576 by op1 and op2. */
2577 emit_move_insn (op1, target1);
2578 emit_move_insn (op2, target2);
2580 return const0_rtx;
2583 /* Expand a call to the internal cexpi builtin to the sincos math function.
2584 EXP is the expression that is a call to the builtin function; if convenient,
2585 the result should be placed in TARGET. */
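/* __builtin_cexpi (x) computes cos (x) + i*sin (x); the middle end
   introduces it, e.g. when tree-ssa-math-opts.c CSEs a sin (x) and
   cos (x) pair. A sketch of the equivalence:

     _Complex double r = __builtin_cexpi (x);

   where creal (r) is cos (x) and cimag (r) is sin (x). */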
2587 static rtx
2588 expand_builtin_cexpi (tree exp, rtx target)
2590 tree fndecl = get_callee_fndecl (exp);
2591 tree arg, type;
2592 machine_mode mode;
2593 rtx op0, op1, op2;
2594 location_t loc = EXPR_LOCATION (exp);
2596 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2597 return NULL_RTX;
2599 arg = CALL_EXPR_ARG (exp, 0);
2600 type = TREE_TYPE (arg);
2601 mode = TYPE_MODE (TREE_TYPE (arg));
2603 /* Try expanding via a sincos optab, fall back to emitting a libcall
2604 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2605 is only generated from sincos or cexp, or if we have either of them. */
2606 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2608 op1 = gen_reg_rtx (mode);
2609 op2 = gen_reg_rtx (mode);
2611 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2613 /* Compute into op1 and op2. */
2614 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2616 else if (targetm.libc_has_function (function_sincos))
2618 tree call, fn = NULL_TREE;
2619 tree top1, top2;
2620 rtx op1a, op2a;
2622 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2623 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2624 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2625 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2626 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2627 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2628 else
2629 gcc_unreachable ();
2631 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2632 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2633 op1a = copy_addr_to_reg (XEXP (op1, 0));
2634 op2a = copy_addr_to_reg (XEXP (op2, 0));
2635 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2636 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2638 /* Make sure not to fold the sincos call again. */
2639 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2640 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2641 call, 3, arg, top1, top2));
2643 else
2645 tree call, fn = NULL_TREE, narg;
2646 tree ctype = build_complex_type (type);
2648 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2649 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2650 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2651 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2652 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2653 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2654 else
2655 gcc_unreachable ();
2657 /* If we don't have a decl for cexp create one. This is the
2658 friendliest fallback if the user calls __builtin_cexpi
2659 without full target C99 function support. */
2660 if (fn == NULL_TREE)
2662 tree fntype;
2663 const char *name = NULL;
2665 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2666 name = "cexpf";
2667 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2668 name = "cexp";
2669 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2670 name = "cexpl";
2672 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2673 fn = build_fn_decl (name, fntype);
2676 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2677 build_real (type, dconst0), arg);
2679 /* Make sure not to fold the cexp call again. */
2680 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2681 return expand_expr (build_call_nary (ctype, call, 1, narg),
2682 target, VOIDmode, EXPAND_NORMAL);
2685 /* Now build the proper return type. */
2686 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2687 make_tree (TREE_TYPE (arg), op2),
2688 make_tree (TREE_TYPE (arg), op1)),
2689 target, VOIDmode, EXPAND_NORMAL);
2692 /* Conveniently construct a function call expression. FNDECL names the
2693 function to be called, N is the number of arguments, and the "..."
2694 parameters are the argument expressions. Unlike build_call_expr
2695 this doesn't fold the call, hence it will always return a CALL_EXPR. */
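/* E.g. build_call_nofold_loc (loc, fn, 2, dst, src) yields the
   unfolded CALL_EXPR fn (dst, src) with location LOC; see the
   strcpy/stpcpy fallbacks below for real uses. */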
2697 static tree
2698 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2700 va_list ap;
2701 tree fntype = TREE_TYPE (fndecl);
2702 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2704 va_start (ap, n);
2705 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2706 va_end (ap);
2707 SET_EXPR_LOCATION (fn, loc);
2708 return fn;
2711 /* Expand a call to one of the builtin rounding functions gcc defines
2712 as an extension (lfloor and lceil). As these are gcc extensions we
2713 do not need to worry about setting errno to EDOM.
2714 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2715 EXP is the expression that is a call to the builtin function;
2716 if convenient, the result should be placed in TARGET. */
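/* A sketch of the fallback lowering when no lceil/lfloor instruction
   exists:

     long l = __builtin_lfloor (x);

   becomes, in effect,

     long l = (long) floor (x);

   built below from the floor/ceil fallback decl plus expand_fix. */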
2718 static rtx
2719 expand_builtin_int_roundingfn (tree exp, rtx target)
2721 convert_optab builtin_optab;
2722 rtx op0, tmp;
2723 rtx_insn *insns;
2724 tree fndecl = get_callee_fndecl (exp);
2725 enum built_in_function fallback_fn;
2726 tree fallback_fndecl;
2727 machine_mode mode;
2728 tree arg;
2730 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2731 gcc_unreachable ();
2733 arg = CALL_EXPR_ARG (exp, 0);
2735 switch (DECL_FUNCTION_CODE (fndecl))
2737 CASE_FLT_FN (BUILT_IN_ICEIL):
2738 CASE_FLT_FN (BUILT_IN_LCEIL):
2739 CASE_FLT_FN (BUILT_IN_LLCEIL):
2740 builtin_optab = lceil_optab;
2741 fallback_fn = BUILT_IN_CEIL;
2742 break;
2744 CASE_FLT_FN (BUILT_IN_IFLOOR):
2745 CASE_FLT_FN (BUILT_IN_LFLOOR):
2746 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2747 builtin_optab = lfloor_optab;
2748 fallback_fn = BUILT_IN_FLOOR;
2749 break;
2751 default:
2752 gcc_unreachable ();
2755 /* Make a suitable register to place result in. */
2756 mode = TYPE_MODE (TREE_TYPE (exp));
2758 target = gen_reg_rtx (mode);
2760 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2761 need to expand the argument again. This way, we will not perform
2762 side-effects more than once. */
2763 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2765 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2767 start_sequence ();
2769 /* Compute into TARGET. */
2770 if (expand_sfix_optab (target, op0, builtin_optab))
2772 /* Output the entire sequence. */
2773 insns = get_insns ();
2774 end_sequence ();
2775 emit_insn (insns);
2776 return target;
2779 /* If we were unable to expand via the builtin, stop the sequence
2780 (without outputting the insns). */
2781 end_sequence ();
2783 /* Fall back to floating point rounding optab. */
2784 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2786 /* For non-C99 targets we may end up without a fallback fndecl here
2787 if the user called __builtin_lfloor directly. In this case emit
2788 a call to the floor/ceil variants nevertheless. This should result
2789 in the best user experience for targets without full C99 support. */
2790 if (fallback_fndecl == NULL_TREE)
2792 tree fntype;
2793 const char *name = NULL;
2795 switch (DECL_FUNCTION_CODE (fndecl))
2797 case BUILT_IN_ICEIL:
2798 case BUILT_IN_LCEIL:
2799 case BUILT_IN_LLCEIL:
2800 name = "ceil";
2801 break;
2802 case BUILT_IN_ICEILF:
2803 case BUILT_IN_LCEILF:
2804 case BUILT_IN_LLCEILF:
2805 name = "ceilf";
2806 break;
2807 case BUILT_IN_ICEILL:
2808 case BUILT_IN_LCEILL:
2809 case BUILT_IN_LLCEILL:
2810 name = "ceill";
2811 break;
2812 case BUILT_IN_IFLOOR:
2813 case BUILT_IN_LFLOOR:
2814 case BUILT_IN_LLFLOOR:
2815 name = "floor";
2816 break;
2817 case BUILT_IN_IFLOORF:
2818 case BUILT_IN_LFLOORF:
2819 case BUILT_IN_LLFLOORF:
2820 name = "floorf";
2821 break;
2822 case BUILT_IN_IFLOORL:
2823 case BUILT_IN_LFLOORL:
2824 case BUILT_IN_LLFLOORL:
2825 name = "floorl";
2826 break;
2827 default:
2828 gcc_unreachable ();
2831 fntype = build_function_type_list (TREE_TYPE (arg),
2832 TREE_TYPE (arg), NULL_TREE);
2833 fallback_fndecl = build_fn_decl (name, fntype);
2836 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2838 tmp = expand_normal (exp);
2839 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2841 /* Truncate the result of the floating point optab to an integer
2842 via expand_fix (). */
2843 target = gen_reg_rtx (mode);
2844 expand_fix (target, tmp, 0);
2846 return target;
2849 /* Expand a call to one of the builtin math functions doing integer
2850 conversion (lrint).
2851 Return 0 if a normal call should be emitted rather than expanding the
2852 function in-line. EXP is the expression that is a call to the builtin
2853 function; if convenient, the result should be placed in TARGET. */
2855 static rtx
2856 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2858 convert_optab builtin_optab;
2859 rtx op0;
2860 rtx_insn *insns;
2861 tree fndecl = get_callee_fndecl (exp);
2862 tree arg;
2863 machine_mode mode;
2864 enum built_in_function fallback_fn = BUILT_IN_NONE;
2866 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2867 gcc_unreachable ();
2869 arg = CALL_EXPR_ARG (exp, 0);
2871 switch (DECL_FUNCTION_CODE (fndecl))
2873 CASE_FLT_FN (BUILT_IN_IRINT):
2874 fallback_fn = BUILT_IN_LRINT;
2875 /* FALLTHRU */
2876 CASE_FLT_FN (BUILT_IN_LRINT):
2877 CASE_FLT_FN (BUILT_IN_LLRINT):
2878 builtin_optab = lrint_optab;
2879 break;
2881 CASE_FLT_FN (BUILT_IN_IROUND):
2882 fallback_fn = BUILT_IN_LROUND;
2883 /* FALLTHRU */
2884 CASE_FLT_FN (BUILT_IN_LROUND):
2885 CASE_FLT_FN (BUILT_IN_LLROUND):
2886 builtin_optab = lround_optab;
2887 break;
2889 default:
2890 gcc_unreachable ();
2893 /* There's no easy way to detect the case we need to set EDOM. */
2894 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2895 return NULL_RTX;
2897 /* Make a suitable register to place result in. */
2898 mode = TYPE_MODE (TREE_TYPE (exp));
2900 /* Expand inline only when errno handling is not required; there's no easy way to set EDOM otherwise. */
2901 if (!flag_errno_math)
2903 rtx result = gen_reg_rtx (mode);
2905 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2906 need to expand the argument again. This way, we will not perform
2907 side-effects more than once. */
2908 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2910 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2912 start_sequence ();
2914 if (expand_sfix_optab (result, op0, builtin_optab))
2916 /* Output the entire sequence. */
2917 insns = get_insns ();
2918 end_sequence ();
2919 emit_insn (insns);
2920 return result;
2923 /* If we were unable to expand via the builtin, stop the sequence
2924 (without outputting the insns) and call the library function
2925 with the stabilized argument list. */
2926 end_sequence ();
2929 if (fallback_fn != BUILT_IN_NONE)
2931 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2932 targets, (int) round (x) should never be transformed into
2933 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2934 a call to lround in the hope that the target provides at least some
2935 C99 functions. This should result in the best user experience for
2936 targets without full C99 support. */
2937 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2938 fallback_fn, 0);
2940 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2941 fallback_fndecl, 1, arg);
2943 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2944 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2945 return convert_to_mode (mode, target, 0);
2948 return expand_call (exp, target, target == const0_rtx);
2951 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2952 a normal call should be emitted rather than expanding the function
2953 in-line. EXP is the expression that is a call to the builtin
2954 function; if convenient, the result should be placed in TARGET. */
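/* powi is always expanded here as a library call into libgcc; e.g.
   for double,

     __builtin_powi (x, n)

   becomes a call to __powidf2 (x, n) (the name is the powi_optab
   libfunc for DFmode; the DFmode case is just an example). */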
2956 static rtx
2957 expand_builtin_powi (tree exp, rtx target)
2959 tree arg0, arg1;
2960 rtx op0, op1;
2961 machine_mode mode;
2962 machine_mode mode2;
2964 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2965 return NULL_RTX;
2967 arg0 = CALL_EXPR_ARG (exp, 0);
2968 arg1 = CALL_EXPR_ARG (exp, 1);
2969 mode = TYPE_MODE (TREE_TYPE (exp));
2971 /* Emit a libcall to libgcc. */
2973 /* Mode of the 2nd argument must match that of an int. */
2974 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2976 if (target == NULL_RTX)
2977 target = gen_reg_rtx (mode);
2979 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2980 if (GET_MODE (op0) != mode)
2981 op0 = convert_to_mode (mode, op0, 0);
2982 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2983 if (GET_MODE (op1) != mode2)
2984 op1 = convert_to_mode (mode2, op1, 0);
2986 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2987 target, LCT_CONST, mode, 2,
2988 op0, mode, op1, mode2);
2990 return target;
2993 /* Expand expression EXP which is a call to the strlen builtin. Return
2994 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2995 try to get the result in TARGET, if convenient. */
2997 static rtx
2998 expand_builtin_strlen (tree exp, rtx target,
2999 machine_mode target_mode)
3001 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3002 return NULL_RTX;
3003 else
3005 struct expand_operand ops[4];
3006 rtx pat;
3007 tree len;
3008 tree src = CALL_EXPR_ARG (exp, 0);
3009 rtx src_reg;
3010 rtx_insn *before_strlen;
3011 machine_mode insn_mode = target_mode;
3012 enum insn_code icode = CODE_FOR_nothing;
3013 unsigned int align;
3015 /* If the length can be computed at compile-time, return it. */
3016 len = c_strlen (src, 0);
3017 if (len)
3018 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3020 /* If the length can be computed at compile-time and is a constant
3021 integer, but there are side-effects in src, evaluate
3022 src for side-effects, then return len.
3023 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3024 can be optimized into: i++; x = 3; */
3025 len = c_strlen (src, 1);
3026 if (len && TREE_CODE (len) == INTEGER_CST)
3028 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3029 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3032 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3034 /* If SRC is not a pointer type, don't do this operation inline. */
3035 if (align == 0)
3036 return NULL_RTX;
3038 /* Bail out if we can't compute strlen in the right mode. */
3039 while (insn_mode != VOIDmode)
3041 icode = optab_handler (strlen_optab, insn_mode);
3042 if (icode != CODE_FOR_nothing)
3043 break;
3045 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3047 if (insn_mode == VOIDmode)
3048 return NULL_RTX;
3050 /* Make a place to hold the source address. We will not expand
3051 the actual source until we are sure that the expansion will
3052 not fail -- there are trees that cannot be expanded twice. */
3053 src_reg = gen_reg_rtx (Pmode);
3055 /* Mark the beginning of the strlen sequence so we can emit the
3056 source operand later. */
3057 before_strlen = get_last_insn ();
3059 create_output_operand (&ops[0], target, insn_mode);
3060 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3061 create_integer_operand (&ops[2], 0);
3062 create_integer_operand (&ops[3], align);
3063 if (!maybe_expand_insn (icode, 4, ops))
3064 return NULL_RTX;
3066 /* Now that we are assured of success, expand the source. */
3067 start_sequence ();
3068 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3069 if (pat != src_reg)
3071 #ifdef POINTERS_EXTEND_UNSIGNED
3072 if (GET_MODE (pat) != Pmode)
3073 pat = convert_to_mode (Pmode, pat,
3074 POINTERS_EXTEND_UNSIGNED);
3075 #endif
3076 emit_move_insn (src_reg, pat);
3078 pat = get_insns ();
3079 end_sequence ();
3081 if (before_strlen)
3082 emit_insn_after (pat, before_strlen);
3083 else
3084 emit_insn_before (pat, get_insns ());
3086 /* Return the value in the proper mode for this function. */
3087 if (GET_MODE (ops[0].value) == target_mode)
3088 target = ops[0].value;
3089 else if (target != 0)
3090 convert_move (target, ops[0].value, 0);
3091 else
3092 target = convert_to_mode (target_mode, ops[0].value, 0);
3094 return target;
3098 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3099 bytes from constant string DATA + OFFSET and return it as target
3100 constant. */
3102 static rtx
3103 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3104 machine_mode mode)
3106 const char *str = (const char *) data;
3108 gcc_assert (offset >= 0
3109 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3110 <= strlen (str) + 1));
3112 return c_readstr (str + offset, mode);
3115 /* LEN specifies the length of the block of a memcpy/memset operation.
3116 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3117 In some cases we can make a very likely guess on the max size, which
3118 we then set into PROBABLE_MAX_SIZE. */
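/* For example, with an unsigned N in

     if (n < 100)
       memcpy (a, b, n);

   range information on the SSA name N lets this set *MAX_SIZE (and
   *PROBABLE_MAX_SIZE) to 99 while *MIN_SIZE stays 0; a constant
   LEN_RTX pins all three to the same value. */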
3120 static void
3121 determine_block_size (tree len, rtx len_rtx,
3122 unsigned HOST_WIDE_INT *min_size,
3123 unsigned HOST_WIDE_INT *max_size,
3124 unsigned HOST_WIDE_INT *probable_max_size)
3126 if (CONST_INT_P (len_rtx))
3128 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3129 return;
3131 else
3133 wide_int min, max;
3134 enum value_range_type range_type = VR_UNDEFINED;
3136 /* Determine bounds from the type. */
3137 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3138 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3139 else
3140 *min_size = 0;
3141 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3142 *probable_max_size = *max_size
3143 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3144 else
3145 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3147 if (TREE_CODE (len) == SSA_NAME)
3148 range_type = get_range_info (len, &min, &max);
3149 if (range_type == VR_RANGE)
3151 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3152 *min_size = min.to_uhwi ();
3153 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3154 *probable_max_size = *max_size = max.to_uhwi ();
3156 else if (range_type == VR_ANTI_RANGE)
3158 /* Anti range 0...N lets us determine the minimal size to be N+1. */
3159 if (min == 0)
3161 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3162 *min_size = max.to_uhwi () + 1;
3164 /* Code like
3166 int n;
3167 if (n < 100)
3168 memcpy (a, b, n)
3170 produces an anti range allowing negative values of N. We can still
3171 use that information and guess that N is not negative. */
3173 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3174 *probable_max_size = min.to_uhwi () - 1;
3177 gcc_checking_assert (*max_size <=
3178 (unsigned HOST_WIDE_INT)
3179 GET_MODE_MASK (GET_MODE (len_rtx)));
3182 /* Helper function to do the actual work for expand_builtin_memcpy. */
3184 static rtx
3185 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3187 const char *src_str;
3188 unsigned int src_align = get_pointer_alignment (src);
3189 unsigned int dest_align = get_pointer_alignment (dest);
3190 rtx dest_mem, src_mem, dest_addr, len_rtx;
3191 HOST_WIDE_INT expected_size = -1;
3192 unsigned int expected_align = 0;
3193 unsigned HOST_WIDE_INT min_size;
3194 unsigned HOST_WIDE_INT max_size;
3195 unsigned HOST_WIDE_INT probable_max_size;
3197 /* If DEST is not a pointer type, call the normal function. */
3198 if (dest_align == 0)
3199 return NULL_RTX;
3201 /* If either SRC is not a pointer type, don't do this
3202 operation in-line. */
3203 if (src_align == 0)
3204 return NULL_RTX;
3206 if (currently_expanding_gimple_stmt)
3207 stringop_block_profile (currently_expanding_gimple_stmt,
3208 &expected_align, &expected_size);
3210 if (expected_align < dest_align)
3211 expected_align = dest_align;
3212 dest_mem = get_memory_rtx (dest, len);
3213 set_mem_align (dest_mem, dest_align);
3214 len_rtx = expand_normal (len);
3215 determine_block_size (len, len_rtx, &min_size, &max_size,
3216 &probable_max_size);
3217 src_str = c_getstr (src);
3219 /* If SRC is a string constant and block move would be done
3220 by pieces, we can avoid loading the string from memory
3221 and only store the computed constants. */
3222 if (src_str
3223 && CONST_INT_P (len_rtx)
3224 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3225 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3226 CONST_CAST (char *, src_str),
3227 dest_align, false))
3229 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3230 builtin_memcpy_read_str,
3231 CONST_CAST (char *, src_str),
3232 dest_align, false, 0);
3233 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3234 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3235 return dest_mem;
3238 src_mem = get_memory_rtx (src, len);
3239 set_mem_align (src_mem, src_align);
3241 /* Copy word part most expediently. */
3242 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3243 CALL_EXPR_TAILCALL (exp)
3244 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3245 expected_align, expected_size,
3246 min_size, max_size, probable_max_size);
3248 if (dest_addr == 0)
3250 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3251 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3254 return dest_addr;
3257 /* Expand a call EXP to the memcpy builtin.
3258 Return NULL_RTX if we failed; the caller should emit a normal call,
3259 otherwise try to get the result in TARGET, if convenient (and in
3260 mode MODE if that's convenient). */
3262 static rtx
3263 expand_builtin_memcpy (tree exp, rtx target)
3265 if (!validate_arglist (exp,
3266 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3267 return NULL_RTX;
3268 else
3270 tree dest = CALL_EXPR_ARG (exp, 0);
3271 tree src = CALL_EXPR_ARG (exp, 1);
3272 tree len = CALL_EXPR_ARG (exp, 2);
3273 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3277 /* Expand an instrumented call EXP to the memcpy builtin.
3278 Return NULL_RTX if we failed; the caller should emit a normal call,
3279 otherwise try to get the result in TARGET, if convenient (and in
3280 mode MODE if that's convenient). */
3282 static rtx
3283 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3285 if (!validate_arglist (exp,
3286 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3287 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3288 INTEGER_TYPE, VOID_TYPE))
3289 return NULL_RTX;
3290 else
3292 tree dest = CALL_EXPR_ARG (exp, 0);
3293 tree src = CALL_EXPR_ARG (exp, 2);
3294 tree len = CALL_EXPR_ARG (exp, 4);
3295 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3297 /* Return src bounds with the result. */
3298 if (res)
3300 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3301 expand_normal (CALL_EXPR_ARG (exp, 1)));
3302 res = chkp_join_splitted_slot (res, bnd);
3304 return res;
3308 /* Expand a call EXP to the mempcpy builtin.
3309 Return NULL_RTX if we failed; the caller should emit a normal call,
3310 otherwise try to get the result in TARGET, if convenient (and in
3311 mode MODE if that's convenient). If ENDP is 0 return the
3312 destination pointer, if ENDP is 1 return the end pointer ala
3313 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3314 stpcpy. */
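/* Concretely, when 5 bytes are copied: ENDP == 0 returns DEST,
   ENDP == 1 returns DEST + 5 (mempcpy), and ENDP == 2 returns
   DEST + 4, the address of the copied NUL (stpcpy). */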
3316 static rtx
3317 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3319 if (!validate_arglist (exp,
3320 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3321 return NULL_RTX;
3322 else
3324 tree dest = CALL_EXPR_ARG (exp, 0);
3325 tree src = CALL_EXPR_ARG (exp, 1);
3326 tree len = CALL_EXPR_ARG (exp, 2);
3327 return expand_builtin_mempcpy_args (dest, src, len,
3328 target, mode, /*endp=*/ 1,
3329 exp);
3333 /* Expand an instrumented call EXP to the mempcpy builtin.
3334 Return NULL_RTX if we failed; the caller should emit a normal call,
3335 otherwise try to get the result in TARGET, if convenient (and in
3336 mode MODE if that's convenient). */
3338 static rtx
3339 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3341 if (!validate_arglist (exp,
3342 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3343 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3344 INTEGER_TYPE, VOID_TYPE))
3345 return NULL_RTX;
3346 else
3348 tree dest = CALL_EXPR_ARG (exp, 0);
3349 tree src = CALL_EXPR_ARG (exp, 2);
3350 tree len = CALL_EXPR_ARG (exp, 4);
3351 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3352 mode, 1, exp);
3354 /* Return src bounds with the result. */
3355 if (res)
3357 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3358 expand_normal (CALL_EXPR_ARG (exp, 1)));
3359 res = chkp_join_splitted_slot (res, bnd);
3361 return res;
3365 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3366 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3367 so that this can also be called without constructing an actual CALL_EXPR.
3368 The other arguments and return value are the same as for
3369 expand_builtin_mempcpy. */
3371 static rtx
3372 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3373 rtx target, machine_mode mode, int endp,
3374 tree orig_exp)
3376 tree fndecl = get_callee_fndecl (orig_exp);
3378 /* If return value is ignored, transform mempcpy into memcpy. */
3379 if (target == const0_rtx
3380 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3381 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3383 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3384 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3385 dest, src, len);
3386 return expand_expr (result, target, mode, EXPAND_NORMAL);
3388 else if (target == const0_rtx
3389 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3391 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3392 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3393 dest, src, len);
3394 return expand_expr (result, target, mode, EXPAND_NORMAL);
3396 else
3398 const char *src_str;
3399 unsigned int src_align = get_pointer_alignment (src);
3400 unsigned int dest_align = get_pointer_alignment (dest);
3401 rtx dest_mem, src_mem, len_rtx;
3403 /* If either SRC or DEST is not a pointer type, don't do this
3404 operation in-line. */
3405 if (dest_align == 0 || src_align == 0)
3406 return NULL_RTX;
3408 /* If LEN is not constant, call the normal function. */
3409 if (! tree_fits_uhwi_p (len))
3410 return NULL_RTX;
3412 len_rtx = expand_normal (len);
3413 src_str = c_getstr (src);
3415 /* If SRC is a string constant and block move would be done
3416 by pieces, we can avoid loading the string from memory
3417 and only store the computed constants. */
3418 if (src_str
3419 && CONST_INT_P (len_rtx)
3420 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3421 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3422 CONST_CAST (char *, src_str),
3423 dest_align, false))
3425 dest_mem = get_memory_rtx (dest, len);
3426 set_mem_align (dest_mem, dest_align);
3427 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3428 builtin_memcpy_read_str,
3429 CONST_CAST (char *, src_str),
3430 dest_align, false, endp);
3431 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3432 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3433 return dest_mem;
3436 if (CONST_INT_P (len_rtx)
3437 && can_move_by_pieces (INTVAL (len_rtx),
3438 MIN (dest_align, src_align)))
3440 dest_mem = get_memory_rtx (dest, len);
3441 set_mem_align (dest_mem, dest_align);
3442 src_mem = get_memory_rtx (src, len);
3443 set_mem_align (src_mem, src_align);
3444 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3445 MIN (dest_align, src_align), endp);
3446 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3447 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3448 return dest_mem;
3451 return NULL_RTX;
3455 #ifndef HAVE_movstr
3456 # define HAVE_movstr 0
3457 # define CODE_FOR_movstr CODE_FOR_nothing
3458 #endif
3460 /* Expand into a movstr instruction, if one is available. Return NULL_RTX
3461 if we failed; the caller should then emit a normal call. Otherwise try
3462 to get the result in TARGET, if convenient. If ENDP is 0 return the
3463 destination pointer, if ENDP is 1 return the end pointer ala
3464 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3465 stpcpy. */
3467 static rtx
3468 expand_movstr (tree dest, tree src, rtx target, int endp)
3470 struct expand_operand ops[3];
3471 rtx dest_mem;
3472 rtx src_mem;
3474 if (!HAVE_movstr)
3475 return NULL_RTX;
3477 dest_mem = get_memory_rtx (dest, NULL);
3478 src_mem = get_memory_rtx (src, NULL);
3479 if (!endp)
3481 target = force_reg (Pmode, XEXP (dest_mem, 0));
3482 dest_mem = replace_equiv_address (dest_mem, target);
3485 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3486 create_fixed_operand (&ops[1], dest_mem);
3487 create_fixed_operand (&ops[2], src_mem);
3488 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3489 return NULL_RTX;
3491 if (endp && target != const0_rtx)
3493 target = ops[0].value;
3494 /* movstr is supposed to set end to the address of the NUL
3495 terminator. If the caller requested a mempcpy-like return value,
3496 adjust it. */
3497 if (endp == 1)
3499 rtx tem = plus_constant (GET_MODE (target),
3500 gen_lowpart (GET_MODE (target), target), 1);
3501 emit_move_insn (target, force_operand (tem, NULL_RTX));
3504 return target;
3507 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3508 NULL_RTX if we failed; the caller should then emit a normal call.
3509 Otherwise try to get the result in TARGET, if convenient. */
3512 static rtx
3513 expand_builtin_strcpy (tree exp, rtx target)
3515 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3517 tree dest = CALL_EXPR_ARG (exp, 0);
3518 tree src = CALL_EXPR_ARG (exp, 1);
3519 return expand_builtin_strcpy_args (dest, src, target);
3521 return NULL_RTX;
3524 /* Helper function to do the actual work for expand_builtin_strcpy. The
3525 arguments to the builtin_strcpy call DEST and SRC are broken out
3526 so that this can also be called without constructing an actual CALL_EXPR.
3527 The other arguments and return value are the same as for
3528 expand_builtin_strcpy. */
3530 static rtx
3531 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3533 return expand_movstr (dest, src, target, /*endp=*/0);
3536 /* Expand a call EXP to the stpcpy builtin.
3537 Return NULL_RTX if we failed; the caller should then emit a normal
3538 call. Otherwise try to get the result in TARGET, if convenient (and
3539 in mode MODE if that's convenient). */
3541 static rtx
3542 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3544 tree dst, src;
3545 location_t loc = EXPR_LOCATION (exp);
3547 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3548 return NULL_RTX;
3550 dst = CALL_EXPR_ARG (exp, 0);
3551 src = CALL_EXPR_ARG (exp, 1);
3553 /* If the return value is ignored, transform stpcpy into strcpy. */
3554 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3556 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3557 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3558 return expand_expr (result, target, mode, EXPAND_NORMAL);
3560 else
3562 tree len, lenp1;
3563 rtx ret;
3565 /* Ensure we get an actual string whose length can be evaluated at
3566 compile-time, not an expression containing a string. This is
3567 because the latter will potentially produce pessimized code
3568 when used to compute the return value. */
3569 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3570 return expand_movstr (dst, src, target, /*endp=*/2);
3572 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3573 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3574 target, mode, /*endp=*/2,
3575 exp);
3577 if (ret)
3578 return ret;
3580 if (TREE_CODE (len) == INTEGER_CST)
3582 rtx len_rtx = expand_normal (len);
3584 if (CONST_INT_P (len_rtx))
3586 ret = expand_builtin_strcpy_args (dst, src, target);
3588 if (ret)
3590 if (! target)
3592 if (mode != VOIDmode)
3593 target = gen_reg_rtx (mode);
3594 else
3595 target = gen_reg_rtx (GET_MODE (ret));
3597 if (GET_MODE (target) != GET_MODE (ret))
3598 ret = gen_lowpart (GET_MODE (target), ret);
3600 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3601 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3602 gcc_assert (ret);
3604 return target;
3609 return expand_movstr (dst, src, target, /*endp=*/2);
3613 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3614 bytes from constant string DATA + OFFSET and return it as target
3615 constant. */
3617 static rtx
3618 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3619 machine_mode mode)
3621 const char *str = (const char *) data;
3623 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3624 return const0_rtx;
3626 return c_readstr (str + offset, mode);
3629 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3630 NULL_RTX if we failed; the caller should then emit a normal call. */
3632 static rtx
3633 expand_builtin_strncpy (tree exp, rtx target)
3635 location_t loc = EXPR_LOCATION (exp);
3637 if (validate_arglist (exp,
3638 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3640 tree dest = CALL_EXPR_ARG (exp, 0);
3641 tree src = CALL_EXPR_ARG (exp, 1);
3642 tree len = CALL_EXPR_ARG (exp, 2);
3643 tree slen = c_strlen (src, 1);
3645 /* Both LEN and the length of SRC must be compile-time constants. */
3646 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3647 return NULL_RTX;
3649 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3651 /* We're required to pad with trailing zeros if the requested
3652 len is greater than strlen(s2)+1. In that case try to
3653 use store_by_pieces; if that fails, punt. */
3654 if (tree_int_cst_lt (slen, len))
3656 unsigned int dest_align = get_pointer_alignment (dest);
3657 const char *p = c_getstr (src);
3658 rtx dest_mem;
3660 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3661 || !can_store_by_pieces (tree_to_uhwi (len),
3662 builtin_strncpy_read_str,
3663 CONST_CAST (char *, p),
3664 dest_align, false))
3665 return NULL_RTX;
3667 dest_mem = get_memory_rtx (dest, len);
3668 store_by_pieces (dest_mem, tree_to_uhwi (len),
3669 builtin_strncpy_read_str,
3670 CONST_CAST (char *, p), dest_align, false, 0);
3671 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3672 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3673 return dest_mem;
3676 return NULL_RTX;
3679 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3680 bytes from constant string DATA + OFFSET and return it as target
3681 constant. */
3683 static rtx
3684 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3685 machine_mode mode)
3687 const char *c = (const char *) data;
3688 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3690 memset (p, *c, GET_MODE_SIZE (mode));
3692 return c_readstr (p, mode);
3695 /* Callback routine for store_by_pieces. Return the RTL of a register
3696 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3697 char value given in the RTL register data. For example, if mode is
3698 4 bytes wide, return the RTL for 0x01010101*data. */
3700 static rtx
3701 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3702 machine_mode mode)
3704 rtx target, coeff;
3705 size_t size;
3706 char *p;
3708 size = GET_MODE_SIZE (mode);
3709 if (size == 1)
3710 return (rtx) data;
3712 p = XALLOCAVEC (char, size);
3713 memset (p, 1, size);
3714 coeff = c_readstr (p, mode);
3716 target = convert_to_mode (mode, (rtx) data, 1);
3717 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3718 return force_reg (mode, target);
3721 /* Expand expression EXP, which is a call to the memset builtin. Return
3722 NULL_RTX if we failed; the caller should then emit a normal call.
3723 Otherwise try to get the result in TARGET, if convenient (and in mode
3724 MODE if that's convenient). */
3726 static rtx
3727 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3729 if (!validate_arglist (exp,
3730 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3731 return NULL_RTX;
3732 else
3734 tree dest = CALL_EXPR_ARG (exp, 0);
3735 tree val = CALL_EXPR_ARG (exp, 1);
3736 tree len = CALL_EXPR_ARG (exp, 2);
3737 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3741 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3742 Return NULL_RTX if we failed; the caller should then emit a normal call.
3743 Otherwise try to get the result in TARGET, if convenient (and in mode
3744 MODE if that's convenient). */
3746 static rtx
3747 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3749 if (!validate_arglist (exp,
3750 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3751 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3752 return NULL_RTX;
3753 else
3755 tree dest = CALL_EXPR_ARG (exp, 0);
3756 tree val = CALL_EXPR_ARG (exp, 2);
3757 tree len = CALL_EXPR_ARG (exp, 3);
3758 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3760 /* Return src bounds with the result. */
3761 if (res)
3763 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3764 expand_normal (CALL_EXPR_ARG (exp, 1)));
3765 res = chkp_join_splitted_slot (res, bnd);
3767 return res;
3771 /* Helper function to do the actual work for expand_builtin_memset. The
3772 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3773 so that this can also be called without constructing an actual CALL_EXPR.
3774 The other arguments and return value are the same as for
3775 expand_builtin_memset. */
3777 static rtx
3778 expand_builtin_memset_args (tree dest, tree val, tree len,
3779 rtx target, machine_mode mode, tree orig_exp)
3781 tree fndecl, fn;
3782 enum built_in_function fcode;
3783 machine_mode val_mode;
3784 char c;
3785 unsigned int dest_align;
3786 rtx dest_mem, dest_addr, len_rtx;
3787 HOST_WIDE_INT expected_size = -1;
3788 unsigned int expected_align = 0;
3789 unsigned HOST_WIDE_INT min_size;
3790 unsigned HOST_WIDE_INT max_size;
3791 unsigned HOST_WIDE_INT probable_max_size;
3793 dest_align = get_pointer_alignment (dest);
3795 /* If DEST is not a pointer type, don't do this operation in-line. */
3796 if (dest_align == 0)
3797 return NULL_RTX;
3799 if (currently_expanding_gimple_stmt)
3800 stringop_block_profile (currently_expanding_gimple_stmt,
3801 &expected_align, &expected_size);
3803 if (expected_align < dest_align)
3804 expected_align = dest_align;
3806 /* If the LEN parameter is zero, return DEST. */
3807 if (integer_zerop (len))
3809 /* Evaluate and ignore VAL in case it has side-effects. */
3810 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3811 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3814 /* Stabilize the arguments in case we fail. */
3815 dest = builtin_save_expr (dest);
3816 val = builtin_save_expr (val);
3817 len = builtin_save_expr (len);
3819 len_rtx = expand_normal (len);
3820 determine_block_size (len, len_rtx, &min_size, &max_size,
3821 &probable_max_size);
3822 dest_mem = get_memory_rtx (dest, len);
3823 val_mode = TYPE_MODE (unsigned_char_type_node);
3825 if (TREE_CODE (val) != INTEGER_CST)
3827 rtx val_rtx;
3829 val_rtx = expand_normal (val);
3830 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3832 /* Assume that we can memset by pieces if we can store
3833 the coefficients by pieces (in the required modes).
3834 We can't pass builtin_memset_gen_str as that emits RTL. */
3835 c = 1;
3836 if (tree_fits_uhwi_p (len)
3837 && can_store_by_pieces (tree_to_uhwi (len),
3838 builtin_memset_read_str, &c, dest_align,
3839 true))
3841 val_rtx = force_reg (val_mode, val_rtx);
3842 store_by_pieces (dest_mem, tree_to_uhwi (len),
3843 builtin_memset_gen_str, val_rtx, dest_align,
3844 true, 0);
3846 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3847 dest_align, expected_align,
3848 expected_size, min_size, max_size,
3849 probable_max_size))
3850 goto do_libcall;
3852 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3853 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3854 return dest_mem;
3857 if (target_char_cast (val, &c))
3858 goto do_libcall;
3860 if (c)
3862 if (tree_fits_uhwi_p (len)
3863 && can_store_by_pieces (tree_to_uhwi (len),
3864 builtin_memset_read_str, &c, dest_align,
3865 true))
3866 store_by_pieces (dest_mem, tree_to_uhwi (len),
3867 builtin_memset_read_str, &c, dest_align, true, 0);
3868 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3869 gen_int_mode (c, val_mode),
3870 dest_align, expected_align,
3871 expected_size, min_size, max_size,
3872 probable_max_size))
3873 goto do_libcall;
3875 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3876 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3877 return dest_mem;
3880 set_mem_align (dest_mem, dest_align);
3881 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3882 CALL_EXPR_TAILCALL (orig_exp)
3883 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3884 expected_align, expected_size,
3885 min_size, max_size,
3886 probable_max_size);
3888 if (dest_addr == 0)
3890 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3891 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3894 return dest_addr;
3896 do_libcall:
3897 fndecl = get_callee_fndecl (orig_exp);
3898 fcode = DECL_FUNCTION_CODE (fndecl);
3899 if (fcode == BUILT_IN_MEMSET
3900 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3901 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3902 dest, val, len);
3903 else if (fcode == BUILT_IN_BZERO)
3904 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3905 dest, len);
3906 else
3907 gcc_unreachable ();
3908 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3909 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3910 return expand_call (fn, target, target == const0_rtx);
3913 /* Expand expression EXP, which is a call to the bzero builtin. Return
3914 NULL_RTX if we failed; the caller should then emit a normal call. */
3916 static rtx
3917 expand_builtin_bzero (tree exp)
3919 tree dest, size;
3920 location_t loc = EXPR_LOCATION (exp);
3922 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3923 return NULL_RTX;
3925 dest = CALL_EXPR_ARG (exp, 0);
3926 size = CALL_EXPR_ARG (exp, 1);
3928 /* New argument list transforming bzero(ptr x, int y) to
3929 memset(ptr x, int 0, size_t y). This is done this way
3930 so that if it isn't expanded inline, we fall back to
3931 calling bzero instead of memset. */
3933 return expand_builtin_memset_args (dest, integer_zero_node,
3934 fold_convert_loc (loc,
3935 size_type_node, size),
3936 const0_rtx, VOIDmode, exp);
3939 /* Expand expression EXP, which is a call to the memcmp built-in function.
3940 Return NULL_RTX if we failed and the caller should emit a normal call,
3941 otherwise try to get the result in TARGET, if convenient (and in mode
3942 MODE, if that's convenient). */
3944 static rtx
3945 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3946 ATTRIBUTE_UNUSED machine_mode mode)
3948 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3950 if (!validate_arglist (exp,
3951 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3952 return NULL_RTX;
3954 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3955 implementing memcmp because it will stop if it encounters two
3956 matching zero bytes. */
3957 #if defined HAVE_cmpmemsi
3959 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3960 rtx result;
3961 rtx insn;
3962 tree arg1 = CALL_EXPR_ARG (exp, 0);
3963 tree arg2 = CALL_EXPR_ARG (exp, 1);
3964 tree len = CALL_EXPR_ARG (exp, 2);
3966 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3967 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3968 machine_mode insn_mode;
3970 if (HAVE_cmpmemsi)
3971 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3972 else
3973 return NULL_RTX;
3975 /* If either argument is not a pointer, call the function. */
3976 if (arg1_align == 0 || arg2_align == 0)
3977 return NULL_RTX;
3979 /* Make a place to write the result of the instruction. */
3980 result = target;
3981 if (! (result != 0
3982 && REG_P (result) && GET_MODE (result) == insn_mode
3983 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3984 result = gen_reg_rtx (insn_mode);
3986 arg1_rtx = get_memory_rtx (arg1, len);
3987 arg2_rtx = get_memory_rtx (arg2, len);
3988 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3990 /* Set MEM_SIZE as appropriate. */
3991 if (CONST_INT_P (arg3_rtx))
3993 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3994 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3997 if (HAVE_cmpmemsi)
3998 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3999 GEN_INT (MIN (arg1_align, arg2_align)));
4000 else
4001 gcc_unreachable ();
4003 if (insn)
4004 emit_insn (insn);
4005 else
4006 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4007 TYPE_MODE (integer_type_node), 3,
4008 XEXP (arg1_rtx, 0), Pmode,
4009 XEXP (arg2_rtx, 0), Pmode,
4010 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4011 TYPE_UNSIGNED (sizetype)),
4012 TYPE_MODE (sizetype));
4014 /* Return the value in the proper mode for this function. */
4015 mode = TYPE_MODE (TREE_TYPE (exp));
4016 if (GET_MODE (result) == mode)
4017 return result;
4018 else if (target != 0)
4020 convert_move (target, result, 0);
4021 return target;
4023 else
4024 return convert_to_mode (mode, result, 0);
4026 #endif /* HAVE_cmpmemsi. */
4028 return NULL_RTX;
4031 /* Expand expression EXP, which is a call to the strcmp builtin. Return
4032 NULL_RTX if we failed; the caller should then emit a normal call.
4033 Otherwise try to get the result in TARGET, if convenient. */
4035 static rtx
4036 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4038 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4039 return NULL_RTX;
4041 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4042 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4043 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4045 rtx arg1_rtx, arg2_rtx;
4046 rtx result, insn = NULL_RTX;
4047 tree fndecl, fn;
4048 tree arg1 = CALL_EXPR_ARG (exp, 0);
4049 tree arg2 = CALL_EXPR_ARG (exp, 1);
4051 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4052 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4054 /* If either argument is not a pointer, call the function. */
4055 if (arg1_align == 0 || arg2_align == 0)
4056 return NULL_RTX;
4058 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4059 arg1 = builtin_save_expr (arg1);
4060 arg2 = builtin_save_expr (arg2);
4062 arg1_rtx = get_memory_rtx (arg1, NULL);
4063 arg2_rtx = get_memory_rtx (arg2, NULL);
4065 #ifdef HAVE_cmpstrsi
4066 /* Try to call cmpstrsi. */
4067 if (HAVE_cmpstrsi)
4069 machine_mode insn_mode
4070 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4072 /* Make a place to write the result of the instruction. */
4073 result = target;
4074 if (! (result != 0
4075 && REG_P (result) && GET_MODE (result) == insn_mode
4076 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4077 result = gen_reg_rtx (insn_mode);
4079 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4080 GEN_INT (MIN (arg1_align, arg2_align)));
4082 #endif
4083 #ifdef HAVE_cmpstrnsi
4084 /* Try to determine at least one length and call cmpstrnsi. */
4085 if (!insn && HAVE_cmpstrnsi)
4087 tree len;
4088 rtx arg3_rtx;
4090 machine_mode insn_mode
4091 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4092 tree len1 = c_strlen (arg1, 1);
4093 tree len2 = c_strlen (arg2, 1);
4095 if (len1)
4096 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4097 if (len2)
4098 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4100 /* If we don't have a constant length for the first, use the length
4101 of the second, if we know it. We don't require a constant for
4102 this case; some cost analysis could be done if both are available
4103 but neither is constant. For now, assume they're equally cheap,
4104 unless one has side effects. If both strings have constant lengths,
4105 use the smaller. */
4107 if (!len1)
4108 len = len2;
4109 else if (!len2)
4110 len = len1;
4111 else if (TREE_SIDE_EFFECTS (len1))
4112 len = len2;
4113 else if (TREE_SIDE_EFFECTS (len2))
4114 len = len1;
4115 else if (TREE_CODE (len1) != INTEGER_CST)
4116 len = len2;
4117 else if (TREE_CODE (len2) != INTEGER_CST)
4118 len = len1;
4119 else if (tree_int_cst_lt (len1, len2))
4120 len = len1;
4121 else
4122 len = len2;
4124 /* If both arguments have side effects, we cannot optimize. */
4125 if (!len || TREE_SIDE_EFFECTS (len))
4126 goto do_libcall;
4128 arg3_rtx = expand_normal (len);
4130 /* Make a place to write the result of the instruction. */
4131 result = target;
4132 if (! (result != 0
4133 && REG_P (result) && GET_MODE (result) == insn_mode
4134 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4135 result = gen_reg_rtx (insn_mode);
4137 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4138 GEN_INT (MIN (arg1_align, arg2_align)));
4140 #endif
4142 if (insn)
4144 machine_mode mode;
4145 emit_insn (insn);
4147 /* Return the value in the proper mode for this function. */
4148 mode = TYPE_MODE (TREE_TYPE (exp));
4149 if (GET_MODE (result) == mode)
4150 return result;
4151 if (target == 0)
4152 return convert_to_mode (mode, result, 0);
4153 convert_move (target, result, 0);
4154 return target;
4157 /* Expand the library call ourselves using a stabilized argument
4158 list to avoid evaluating the function's arguments twice. */
4159 #ifdef HAVE_cmpstrnsi
4160 do_libcall:
4161 #endif
4162 fndecl = get_callee_fndecl (exp);
4163 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4164 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4165 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4166 return expand_call (fn, target, target == const0_rtx);
4168 #endif
4169 return NULL_RTX;
4172 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4173 NULL_RTX if we failed; the caller should then emit a normal call.
4174 Otherwise try to get the result in TARGET, if convenient. */
4176 static rtx
4177 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4178 ATTRIBUTE_UNUSED machine_mode mode)
4180 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4182 if (!validate_arglist (exp,
4183 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4184 return NULL_RTX;
4186 /* If c_strlen can determine an expression for one of the string
4187 lengths, and it doesn't have side effects, then emit cmpstrnsi
4188 using length MIN(strlen(string)+1, arg3). */
4189 #ifdef HAVE_cmpstrnsi
4190 if (HAVE_cmpstrnsi)
4192 tree len, len1, len2;
4193 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4194 rtx result, insn;
4195 tree fndecl, fn;
4196 tree arg1 = CALL_EXPR_ARG (exp, 0);
4197 tree arg2 = CALL_EXPR_ARG (exp, 1);
4198 tree arg3 = CALL_EXPR_ARG (exp, 2);
4200 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4201 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4202 machine_mode insn_mode
4203 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4205 len1 = c_strlen (arg1, 1);
4206 len2 = c_strlen (arg2, 1);
4208 if (len1)
4209 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4210 if (len2)
4211 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4213 /* If we don't have a constant length for the first, use the length
4214 of the second, if we know it. We don't require a constant for
4215 this case; some cost analysis could be done if both are available
4216 but neither is constant. For now, assume they're equally cheap,
4217 unless one has side effects. If both strings have constant lengths,
4218 use the smaller. */
4220 if (!len1)
4221 len = len2;
4222 else if (!len2)
4223 len = len1;
4224 else if (TREE_SIDE_EFFECTS (len1))
4225 len = len2;
4226 else if (TREE_SIDE_EFFECTS (len2))
4227 len = len1;
4228 else if (TREE_CODE (len1) != INTEGER_CST)
4229 len = len2;
4230 else if (TREE_CODE (len2) != INTEGER_CST)
4231 len = len1;
4232 else if (tree_int_cst_lt (len1, len2))
4233 len = len1;
4234 else
4235 len = len2;
4237 /* If both arguments have side effects, we cannot optimize. */
4238 if (!len || TREE_SIDE_EFFECTS (len))
4239 return NULL_RTX;
4241 /* The actual new length parameter is MIN(len,arg3). */
4242 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4243 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4245 /* If either argument is not a pointer, call the function. */
4246 if (arg1_align == 0 || arg2_align == 0)
4247 return NULL_RTX;
4249 /* Make a place to write the result of the instruction. */
4250 result = target;
4251 if (! (result != 0
4252 && REG_P (result) && GET_MODE (result) == insn_mode
4253 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4254 result = gen_reg_rtx (insn_mode);
4256 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4257 arg1 = builtin_save_expr (arg1);
4258 arg2 = builtin_save_expr (arg2);
4259 len = builtin_save_expr (len);
4261 arg1_rtx = get_memory_rtx (arg1, len);
4262 arg2_rtx = get_memory_rtx (arg2, len);
4263 arg3_rtx = expand_normal (len);
4264 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4265 GEN_INT (MIN (arg1_align, arg2_align)));
4266 if (insn)
4268 emit_insn (insn);
4270 /* Return the value in the proper mode for this function. */
4271 mode = TYPE_MODE (TREE_TYPE (exp));
4272 if (GET_MODE (result) == mode)
4273 return result;
4274 if (target == 0)
4275 return convert_to_mode (mode, result, 0);
4276 convert_move (target, result, 0);
4277 return target;
4280 /* Expand the library call ourselves using a stabilized argument
4281 list to avoid evaluating the function's arguments twice. */
4282 fndecl = get_callee_fndecl (exp);
4283 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4284 arg1, arg2, len);
4285 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4286 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4287 return expand_call (fn, target, target == const0_rtx);
4289 #endif
4290 return NULL_RTX;
4293 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4294 if that's convenient. */
4296 rtx
4297 expand_builtin_saveregs (void)
4299 rtx val;
4300 rtx_insn *seq;
4302 /* Don't do __builtin_saveregs more than once in a function.
4303 Save the result of the first call and reuse it. */
4304 if (saveregs_value != 0)
4305 return saveregs_value;
4307 /* When this function is called, it means that registers must be
4308 saved on entry to this function. So we migrate the call to the
4309 first insn of this function. */
4311 start_sequence ();
4313 /* Do whatever the machine needs done in this case. */
4314 val = targetm.calls.expand_builtin_saveregs ();
4316 seq = get_insns ();
4317 end_sequence ();
4319 saveregs_value = val;
4321 /* Put the insns after the NOTE that starts the function. If this
4322 is inside a start_sequence, make the outer-level insn chain current, so
4323 the code is placed at the start of the function. */
4324 push_topmost_sequence ();
4325 emit_insn_after (seq, entry_of_function ());
4326 pop_topmost_sequence ();
4328 return val;
4331 /* Expand a call to __builtin_next_arg. */
4333 static rtx
4334 expand_builtin_next_arg (void)
4336 /* Checking arguments is already done in fold_builtin_next_arg
4337 that must be called before this function. */
4338 return expand_binop (ptr_mode, add_optab,
4339 crtl->args.internal_arg_pointer,
4340 crtl->args.arg_offset_rtx,
4341 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4344 /* Make it easier for the backends by protecting the valist argument
4345 from multiple evaluations. */
4347 static tree
4348 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4350 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4352 /* The current way of determining the type of valist is completely
4353 bogus. We should have the information on the va builtin instead. */
4354 if (!vatype)
4355 vatype = targetm.fn_abi_va_list (cfun->decl);
4357 if (TREE_CODE (vatype) == ARRAY_TYPE)
4359 if (TREE_SIDE_EFFECTS (valist))
4360 valist = save_expr (valist);
4362 /* For this case, the backends will be expecting a pointer to
4363 vatype, but it's possible we've actually been given an array
4364 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4365 So fix it. */
4366 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4368 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4369 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4372 else
4374 tree pt = build_pointer_type (vatype);
4376 if (! needs_lvalue)
4378 if (! TREE_SIDE_EFFECTS (valist))
4379 return valist;
4381 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4382 TREE_SIDE_EFFECTS (valist) = 1;
4385 if (TREE_SIDE_EFFECTS (valist))
4386 valist = save_expr (valist);
4387 valist = fold_build2_loc (loc, MEM_REF,
4388 vatype, valist, build_int_cst (pt, 0));
4391 return valist;
4394 /* The "standard" definition of va_list is void*. */
4396 tree
4397 std_build_builtin_va_list (void)
4399 return ptr_type_node;
4402 /* The "standard" abi va_list is va_list_type_node. */
4404 tree
4405 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4407 return va_list_type_node;
4410 /* The "standard" type of va_list is va_list_type_node. */
4412 tree
4413 std_canonical_va_list_type (tree type)
4415 tree wtype, htype;
4417 if (INDIRECT_REF_P (type))
4418 type = TREE_TYPE (type);
4419 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4420 type = TREE_TYPE (type);
4421 wtype = va_list_type_node;
4422 htype = type;
4423 /* Treat structure va_list types. */
4424 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4425 htype = TREE_TYPE (htype);
4426 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4428 /* If va_list is an array type, the argument may have decayed
4429 to a pointer type, e.g. by being passed to another function.
4430 In that case, unwrap both types so that we can compare the
4431 underlying records. */
4432 if (TREE_CODE (htype) == ARRAY_TYPE
4433 || POINTER_TYPE_P (htype))
4435 wtype = TREE_TYPE (wtype);
4436 htype = TREE_TYPE (htype);
4439 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4440 return va_list_type_node;
4442 return NULL_TREE;
4445 /* The "standard" implementation of va_start: just assign `nextarg' to
4446 the variable. */
4448 void
4449 std_expand_builtin_va_start (tree valist, rtx nextarg)
4451 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4452 convert_move (va_r, nextarg, 0);
4454 /* We do not have any valid bounds for the pointer, so
4455 just store zero bounds for it. */
4456 if (chkp_function_instrumented_p (current_function_decl))
4457 chkp_expand_bounds_reset_for_mem (valist,
4458 make_tree (TREE_TYPE (valist),
4459 nextarg));
4462 /* Expand EXP, a call to __builtin_va_start. */
4464 static rtx
4465 expand_builtin_va_start (tree exp)
4467 rtx nextarg;
4468 tree valist;
4469 location_t loc = EXPR_LOCATION (exp);
4471 if (call_expr_nargs (exp) < 2)
4473 error_at (loc, "too few arguments to function %<va_start%>");
4474 return const0_rtx;
4477 if (fold_builtin_next_arg (exp, true))
4478 return const0_rtx;
4480 nextarg = expand_builtin_next_arg ();
4481 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4483 if (targetm.expand_builtin_va_start)
4484 targetm.expand_builtin_va_start (valist, nextarg);
4485 else
4486 std_expand_builtin_va_start (valist, nextarg);
4488 return const0_rtx;
4491 /* Expand EXP, a call to __builtin_va_end. */
4493 static rtx
4494 expand_builtin_va_end (tree exp)
4496 tree valist = CALL_EXPR_ARG (exp, 0);
4498 /* Evaluate for side effects, if needed. I hate macros that don't
4499 do that. */
4500 if (TREE_SIDE_EFFECTS (valist))
4501 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4503 return const0_rtx;
4506 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4507 builtin rather than just as an assignment in stdarg.h because of the
4508 nastiness of array-type va_list types. */
4510 static rtx
4511 expand_builtin_va_copy (tree exp)
4513 tree dst, src, t;
4514 location_t loc = EXPR_LOCATION (exp);
4516 dst = CALL_EXPR_ARG (exp, 0);
4517 src = CALL_EXPR_ARG (exp, 1);
4519 dst = stabilize_va_list_loc (loc, dst, 1);
4520 src = stabilize_va_list_loc (loc, src, 0);
4522 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4524 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4526 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4527 TREE_SIDE_EFFECTS (t) = 1;
4528 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4530 else
4532 rtx dstb, srcb, size;
4534 /* Evaluate to pointers. */
4535 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4536 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4537 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4538 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4540 dstb = convert_memory_address (Pmode, dstb);
4541 srcb = convert_memory_address (Pmode, srcb);
4543 /* "Dereference" to BLKmode memories. */
4544 dstb = gen_rtx_MEM (BLKmode, dstb);
4545 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4546 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4547 srcb = gen_rtx_MEM (BLKmode, srcb);
4548 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4549 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4551 /* Copy. */
4552 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4555 return const0_rtx;
4558 /* Expand a call to one of the builtin functions __builtin_frame_address or
4559 __builtin_return_address. */
4561 static rtx
4562 expand_builtin_frame_address (tree fndecl, tree exp)
4564 /* The argument must be a nonnegative integer constant.
4565 It counts the number of frames to scan up the stack.
4566 The value is the return address saved in that frame. */
4567 if (call_expr_nargs (exp) == 0)
4568 /* Warning about missing arg was already issued. */
4569 return const0_rtx;
4570 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4572 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4573 error ("invalid argument to %<__builtin_frame_address%>");
4574 else
4575 error ("invalid argument to %<__builtin_return_address%>");
4576 return const0_rtx;
4578 else
4580 rtx tem
4581 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4582 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4584 /* Some ports cannot access arbitrary stack frames. */
4585 if (tem == NULL)
4587 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4588 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4589 else
4590 warning (0, "unsupported argument to %<__builtin_return_address%>");
4591 return const0_rtx;
4594 /* For __builtin_frame_address, return what we've got. */
4595 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4596 return tem;
4598 if (!REG_P (tem)
4599 && ! CONSTANT_P (tem))
4600 tem = copy_addr_to_reg (tem);
4601 return tem;
4605 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4606 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4607 is the same as for allocate_dynamic_stack_space. */
4609 static rtx
4610 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4612 rtx op0;
4613 rtx result;
4614 bool valid_arglist;
4615 unsigned int align;
4616 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4617 == BUILT_IN_ALLOCA_WITH_ALIGN);
4619 valid_arglist
4620 = (alloca_with_align
4621 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4622 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4624 if (!valid_arglist)
4625 return NULL_RTX;
4627 /* Compute the argument. */
4628 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4630 /* Compute the alignment. */
4631 align = (alloca_with_align
4632 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4633 : BIGGEST_ALIGNMENT);
4635 /* Allocate the desired space. */
4636 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4637 result = convert_memory_address (ptr_mode, result);
4639 return result;
4642 /* Expand a call to bswap builtin in EXP.
4643 Return NULL_RTX if a normal call should be emitted rather than expanding the
4644 function in-line. If convenient, the result should be placed in TARGET.
4645 SUBTARGET may be used as the target for computing one of EXP's operands. */
4647 static rtx
4648 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4649 rtx subtarget)
4651 tree arg;
4652 rtx op0;
4654 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4655 return NULL_RTX;
4657 arg = CALL_EXPR_ARG (exp, 0);
4658 op0 = expand_expr (arg,
4659 subtarget && GET_MODE (subtarget) == target_mode
4660 ? subtarget : NULL_RTX,
4661 target_mode, EXPAND_NORMAL);
4662 if (GET_MODE (op0) != target_mode)
4663 op0 = convert_to_mode (target_mode, op0, 1);
4665 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4667 gcc_assert (target);
4669 return convert_to_mode (target_mode, target, 1);
4672 /* Expand a call to a unary builtin in EXP.
4673 Return NULL_RTX if a normal call should be emitted rather than expanding the
4674 function in-line. If convenient, the result should be placed in TARGET.
4675 SUBTARGET may be used as the target for computing one of EXP's operands. */
4677 static rtx
4678 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4679 rtx subtarget, optab op_optab)
4681 rtx op0;
4683 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4684 return NULL_RTX;
4686 /* Compute the argument. */
4687 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4688 (subtarget
4689 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4690 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4691 VOIDmode, EXPAND_NORMAL);
4692 /* Compute op, into TARGET if possible.
4693 Set TARGET to wherever the result comes back. */
4694 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4695 op_optab, op0, target, op_optab != clrsb_optab);
4696 gcc_assert (target);
4698 return convert_to_mode (target_mode, target, 0);
4701 /* Expand a call to __builtin_expect. We just return our argument
4702 as the builtin_expect semantics should already have been handled
4703 by the tree branch prediction pass. */
4705 static rtx
4706 expand_builtin_expect (tree exp, rtx target)
4708 tree arg;
4710 if (call_expr_nargs (exp) < 2)
4711 return const0_rtx;
4712 arg = CALL_EXPR_ARG (exp, 0);
4714 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4715 /* When guessing was done, the hints should be already stripped away. */
4716 gcc_assert (!flag_guess_branch_prob
4717 || optimize == 0 || seen_error ());
4718 return target;
4721 /* Expand a call to __builtin_assume_aligned. We just return our first
4722 argument as the builtin_assume_aligned semantics should already have
4723 been handled by CCP. */
4725 static rtx
4726 expand_builtin_assume_aligned (tree exp, rtx target)
4728 if (call_expr_nargs (exp) < 2)
4729 return const0_rtx;
4730 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4731 EXPAND_NORMAL);
4732 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4733 && (call_expr_nargs (exp) < 3
4734 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4735 return target;
4738 void
4739 expand_builtin_trap (void)
4741 #ifdef HAVE_trap
4742 if (HAVE_trap)
4744 rtx insn = emit_insn (gen_trap ());
4745 /* For trap insns when not accumulating outgoing args force
4746 REG_ARGS_SIZE note to prevent crossjumping of calls with
4747 different args sizes. */
4748 if (!ACCUMULATE_OUTGOING_ARGS)
4749 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4751 else
4752 #endif
4753 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4754 emit_barrier ();
4757 /* Expand a call to __builtin_unreachable. We do nothing except emit
4758 a barrier saying that control flow will not pass here.
4760 It is the responsibility of the program being compiled to ensure
4761 that control flow never reaches __builtin_unreachable. */
4762 static void
4763 expand_builtin_unreachable (void)
4765 emit_barrier ();
4768 /* Expand EXP, a call to fabs, fabsf or fabsl.
4769 Return NULL_RTX if a normal call should be emitted rather than expanding
4770 the function inline. If convenient, the result should be placed
4771 in TARGET. SUBTARGET may be used as the target for computing
4772 the operand. */
4774 static rtx
4775 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4777 machine_mode mode;
4778 tree arg;
4779 rtx op0;
4781 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4782 return NULL_RTX;
4784 arg = CALL_EXPR_ARG (exp, 0);
4785 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4786 mode = TYPE_MODE (TREE_TYPE (arg));
4787 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4788 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4791 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4792 Return NULL if a normal call should be emitted rather than expanding the
4793 function inline. If convenient, the result should be placed in TARGET.
4794 SUBTARGET may be used as the target for computing the operand. */
4796 static rtx
4797 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4799 rtx op0, op1;
4800 tree arg;
4802 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4803 return NULL_RTX;
4805 arg = CALL_EXPR_ARG (exp, 0);
4806 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4808 arg = CALL_EXPR_ARG (exp, 1);
4809 op1 = expand_normal (arg);
4811 return expand_copysign (op0, op1, target);
4814 /* Expand a call to __builtin___clear_cache. */
4816 static rtx
4817 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4819 #ifndef HAVE_clear_cache
4820 #ifdef CLEAR_INSN_CACHE
4821 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4822 does something. Just do the default expansion to a call to
4823 __clear_cache(). */
4824 return NULL_RTX;
4825 #else
4826 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4827 does nothing. There is no need to call it. Do nothing. */
4828 return const0_rtx;
4829 #endif /* CLEAR_INSN_CACHE */
4830 #else
4831 /* We have a "clear_cache" insn, and it will handle everything. */
4832 tree begin, end;
4833 rtx begin_rtx, end_rtx;
4835 /* We must not expand to a library call. If we did, any
4836 fallback library function in libgcc that might contain a call to
4837 __builtin___clear_cache() would recurse infinitely. */
4838 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4840 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4841 return const0_rtx;
4844 if (HAVE_clear_cache)
4846 struct expand_operand ops[2];
4848 begin = CALL_EXPR_ARG (exp, 0);
4849 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4851 end = CALL_EXPR_ARG (exp, 1);
4852 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4854 create_address_operand (&ops[0], begin_rtx);
4855 create_address_operand (&ops[1], end_rtx);
4856 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4857 return const0_rtx;
4859 return const0_rtx;
4860 #endif /* HAVE_clear_cache */
4863 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4865 static rtx
4866 round_trampoline_addr (rtx tramp)
4868 rtx temp, addend, mask;
4870 /* If we don't need too much alignment, we'll have been guaranteed
4871 proper alignment by get_trampoline_type. */
4872 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4873 return tramp;
4875 /* Round address up to desired boundary. */
4876 temp = gen_reg_rtx (Pmode);
4877 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4878 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4880 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4881 temp, 0, OPTAB_LIB_WIDEN);
4882 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4883 temp, 0, OPTAB_LIB_WIDEN);
4885 return tramp;
4888 static rtx
4889 expand_builtin_init_trampoline (tree exp, bool onstack)
4891 tree t_tramp, t_func, t_chain;
4892 rtx m_tramp, r_tramp, r_chain, tmp;
4894 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4895 POINTER_TYPE, VOID_TYPE))
4896 return NULL_RTX;
4898 t_tramp = CALL_EXPR_ARG (exp, 0);
4899 t_func = CALL_EXPR_ARG (exp, 1);
4900 t_chain = CALL_EXPR_ARG (exp, 2);
4902 r_tramp = expand_normal (t_tramp);
4903 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4904 MEM_NOTRAP_P (m_tramp) = 1;
4906 /* If ONSTACK, the TRAMP argument should be the address of a field
4907 within the local function's FRAME decl. Either way, let's see if
4908 we can fill in the MEM_ATTRs for this memory. */
4909 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4910 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4912 /* Creator of a heap trampoline is responsible for making sure the
4913 address is aligned to at least STACK_BOUNDARY. Normally malloc
4914 will ensure this anyhow. */
4915 tmp = round_trampoline_addr (r_tramp);
4916 if (tmp != r_tramp)
4918 m_tramp = change_address (m_tramp, BLKmode, tmp);
4919 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4920 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4923 /* The FUNC argument should be the address of the nested function.
4924 Extract the actual function decl to pass to the hook. */
4925 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4926 t_func = TREE_OPERAND (t_func, 0);
4927 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4929 r_chain = expand_normal (t_chain);
4931 /* Generate insns to initialize the trampoline. */
4932 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4934 if (onstack)
4936 trampolines_created = 1;
4938 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4939 "trampoline generated for nested function %qD", t_func);
4942 return const0_rtx;
4945 static rtx
4946 expand_builtin_adjust_trampoline (tree exp)
4948 rtx tramp;
4950 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4951 return NULL_RTX;
4953 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4954 tramp = round_trampoline_addr (tramp);
4955 if (targetm.calls.trampoline_adjust_address)
4956 tramp = targetm.calls.trampoline_adjust_address (tramp);
4958 return tramp;
4961 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4962 function. The function first checks whether the back end provides
4963 an insn to implement signbit for the respective mode. If not, it
4964 checks whether the floating point format of the value is such that
4965 the sign bit can be extracted. If that is not the case, the
4966 function returns NULL_RTX to indicate that a normal call should be
4967 emitted rather than expanding the function in-line. EXP is the
4968 expression that is a call to the builtin function; if convenient,
4969 the result should be placed in TARGET. */
4970 static rtx
4971 expand_builtin_signbit (tree exp, rtx target)
4973 const struct real_format *fmt;
4974 machine_mode fmode, imode, rmode;
4975 tree arg;
4976 int word, bitpos;
4977 enum insn_code icode;
4978 rtx temp;
4979 location_t loc = EXPR_LOCATION (exp);
4981 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4982 return NULL_RTX;
4984 arg = CALL_EXPR_ARG (exp, 0);
4985 fmode = TYPE_MODE (TREE_TYPE (arg));
4986 rmode = TYPE_MODE (TREE_TYPE (exp));
4987 fmt = REAL_MODE_FORMAT (fmode);
4989 arg = builtin_save_expr (arg);
4991 /* Expand the argument, yielding an RTX expression. */
4992 temp = expand_normal (arg);
4994 /* Check if the back end provides an insn that handles signbit for the
4995 argument's mode. */
4996 icode = optab_handler (signbit_optab, fmode);
4997 if (icode != CODE_FOR_nothing)
4999 rtx_insn *last = get_last_insn ();
5000 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5001 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5002 return target;
5003 delete_insns_since (last);
5006 /* For floating point formats without a sign bit, implement signbit
5007 as "ARG < 0.0". */
5008 bitpos = fmt->signbit_ro;
5009 if (bitpos < 0)
5011 /* But we can't do this if the format supports signed zero. */
5012 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5013 return NULL_RTX;
5015 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5016 build_real (TREE_TYPE (arg), dconst0));
5017 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5020 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5022 imode = int_mode_for_mode (fmode);
5023 if (imode == BLKmode)
5024 return NULL_RTX;
5025 temp = gen_lowpart (imode, temp);
5027 else
5029 imode = word_mode;
5030 /* Handle targets with different FP word orders. */
5031 if (FLOAT_WORDS_BIG_ENDIAN)
5032 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5033 else
5034 word = bitpos / BITS_PER_WORD;
5035 temp = operand_subword_force (temp, word, fmode);
5036 bitpos = bitpos % BITS_PER_WORD;
5039 /* Force the intermediate word_mode (or narrower) result into a
5040 register. This avoids attempting to create paradoxical SUBREGs
5041 of floating point modes below. */
5042 temp = force_reg (imode, temp);
5044 /* If the bitpos is within the "result mode" lowpart, the operation
5045 can be implemented with a single bitwise AND. Otherwise, we need
5046 a right shift and an AND. */
5048 if (bitpos < GET_MODE_BITSIZE (rmode))
5050 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5052 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5053 temp = gen_lowpart (rmode, temp);
5054 temp = expand_binop (rmode, and_optab, temp,
5055 immed_wide_int_const (mask, rmode),
5056 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5058 else
5060 /* Perform a logical right shift to place the signbit in the least
5061 significant bit, then truncate the result to the desired mode
5062 and mask just this bit. */
5063 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5064 temp = gen_lowpart (rmode, temp);
5065 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5066 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5069 return temp;
5072 /* Expand fork or exec calls. TARGET is the desired target of the
5073 call. EXP is the call. FN is the identifier of the
5074 actual function. IGNORE is nonzero if the value is to be
5075 ignored. */
5077 static rtx
5078 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5080 tree id, decl;
5081 tree call;
5083 /* If we are not profiling, just call the function. */
5084 if (!profile_arc_flag)
5085 return NULL_RTX;
5087 /* Otherwise call the wrapper. This should be equivalent for the rest of the
5088 compiler, so the code does not diverge, and the wrapper may run the
5089 code necessary for keeping the profiling sane. */
5091 switch (DECL_FUNCTION_CODE (fn))
5093 case BUILT_IN_FORK:
5094 id = get_identifier ("__gcov_fork");
5095 break;
5097 case BUILT_IN_EXECL:
5098 id = get_identifier ("__gcov_execl");
5099 break;
5101 case BUILT_IN_EXECV:
5102 id = get_identifier ("__gcov_execv");
5103 break;
5105 case BUILT_IN_EXECLP:
5106 id = get_identifier ("__gcov_execlp");
5107 break;
5109 case BUILT_IN_EXECLE:
5110 id = get_identifier ("__gcov_execle");
5111 break;
5113 case BUILT_IN_EXECVP:
5114 id = get_identifier ("__gcov_execvp");
5115 break;
5117 case BUILT_IN_EXECVE:
5118 id = get_identifier ("__gcov_execve");
5119 break;
5121 default:
5122 gcc_unreachable ();
5125 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5126 FUNCTION_DECL, id, TREE_TYPE (fn));
5127 DECL_EXTERNAL (decl) = 1;
5128 TREE_PUBLIC (decl) = 1;
5129 DECL_ARTIFICIAL (decl) = 1;
5130 TREE_NOTHROW (decl) = 1;
5131 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5132 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5133 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5134 return expand_call (call, target, ignore);
5139 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5140 the pointer in these functions is void*, the tree optimizers may remove
5141 casts. The mode computed in expand_builtin isn't reliable either, due
5142 to __sync_bool_compare_and_swap.
5144 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5145 group of builtins. This gives us log2 of the mode size. */
5147 static inline machine_mode
5148 get_builtin_sync_mode (int fcode_diff)
5150 /* The size is not negotiable, so ask not to get BLKmode in return
5151 if the target indicates that a smaller size would be better. */
5152 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5155 /* Expand the memory expression LOC and return the appropriate memory operand
5156 for the builtin_sync operations. */
5158 static rtx
5159 get_builtin_sync_mem (tree loc, machine_mode mode)
5161 rtx addr, mem;
5163 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5164 addr = convert_memory_address (Pmode, addr);
5166 /* Note that we explicitly do not want any alias information for this
5167 memory, so that we kill all other live memories. Otherwise we don't
5168 satisfy the full barrier semantics of the intrinsic. */
5169 mem = validize_mem (gen_rtx_MEM (mode, addr));
5171 /* The memory must be at least as aligned as the mode requires. */
5172 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5173 get_pointer_alignment (loc)));
5174 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5175 MEM_VOLATILE_P (mem) = 1;
5177 return mem;
5180 /* Make sure an argument is in the right mode.
5181 EXP is the tree argument.
5182 MODE is the mode it should be in. */
5184 static rtx
5185 expand_expr_force_mode (tree exp, machine_mode mode)
5187 rtx val;
5188 machine_mode old_mode;
5190 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5191 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5192 of CONST_INTs, where we know the old_mode only from the call argument. */
5194 old_mode = GET_MODE (val);
5195 if (old_mode == VOIDmode)
5196 old_mode = TYPE_MODE (TREE_TYPE (exp));
5197 val = convert_modes (mode, old_mode, val, 1);
5198 return val;
5202 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5203 EXP is the CALL_EXPR. CODE is the rtx code
5204 that corresponds to the arithmetic or logical operation from the name;
5205 an exception here is that NOT actually means NAND. TARGET is an optional
5206 place for us to store the results; AFTER is true if this is the
5207 fetch_and_xxx form. */
5209 static rtx
5210 expand_builtin_sync_operation (machine_mode mode, tree exp,
5211 enum rtx_code code, bool after,
5212 rtx target)
5214 rtx val, mem;
5215 location_t loc = EXPR_LOCATION (exp);
5217 if (code == NOT && warn_sync_nand)
5219 tree fndecl = get_callee_fndecl (exp);
5220 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5222 static bool warned_f_a_n, warned_n_a_f;
5224 switch (fcode)
5226 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5227 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5228 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5229 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5230 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5231 if (warned_f_a_n)
5232 break;
5234 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5235 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5236 warned_f_a_n = true;
5237 break;
5239 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5240 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5241 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5242 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5243 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5244 if (warned_n_a_f)
5245 break;
5247 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5248 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5249 warned_n_a_f = true;
5250 break;
5252 default:
5253 gcc_unreachable ();
5257 /* Expand the operands. */
5258 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5259 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5261 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5262 after);
5265 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5266 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5267 true if this is the boolean form. TARGET is a place for us to store the
5268 results; this is NOT optional if IS_BOOL is true. */
5270 static rtx
5271 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5272 bool is_bool, rtx target)
5274 rtx old_val, new_val, mem;
5275 rtx *pbool, *poval;
5277 /* Expand the operands. */
5278 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5279 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5280 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5282 pbool = poval = NULL;
5283 if (target != const0_rtx)
5285 if (is_bool)
5286 pbool = &target;
5287 else
5288 poval = &target;
5290 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5291 false, MEMMODEL_SEQ_CST,
5292 MEMMODEL_SEQ_CST))
5293 return NULL_RTX;
5295 return target;
5298 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5299 general form is actually an atomic exchange, and some targets only
5300 support a reduced form with the second argument being a constant 1.
5301 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5302 the results. */
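/* A minimal spinlock sketch using these builtins (hypothetical user code);
   note that __sync_lock_test_and_set is only an acquire barrier:

     static int lock;
     while (__sync_lock_test_and_set (&lock, 1))
       ;                              // spin until the previous value was 0
     // ... critical section ...
     __sync_lock_release (&lock);     // store 0 with release semantics
*/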
5304 static rtx
5305 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5306 rtx target)
5308 rtx val, mem;
5310 /* Expand the operands. */
5311 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5312 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5314 return expand_sync_lock_test_and_set (target, mem, val);
5317 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5319 static void
5320 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5322 rtx mem;
5324 /* Expand the operands. */
5325 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5327 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5330 /* Given an integer representing an ``enum memmodel'', verify its
5331 correctness and return the memory model enum. */
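/* For illustration (hypothetical caller code): the argument is normally one
   of the __ATOMIC_* constants; a value that is not a compile-time constant,
   as in the second call, is treated as __ATOMIC_SEQ_CST here.

     __atomic_store_n (&x, 0, __ATOMIC_RELEASE);  // INTEGER_CST argument
     int m = __ATOMIC_RELAXED;
     __atomic_store_n (&x, 0, m);                 // run-time value
*/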
5333 static enum memmodel
5334 get_memmodel (tree exp)
5336 rtx op;
5337 unsigned HOST_WIDE_INT val;
5339 /* If the parameter is not a constant, it's a run time value so we'll just
5340 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5341 if (TREE_CODE (exp) != INTEGER_CST)
5342 return MEMMODEL_SEQ_CST;
5344 op = expand_normal (exp);
5346 val = INTVAL (op);
5347 if (targetm.memmodel_check)
5348 val = targetm.memmodel_check (val);
5349 else if (val & ~MEMMODEL_MASK)
5351 warning (OPT_Winvalid_memory_model,
5352 "Unknown architecture specifier in memory model to builtin.");
5353 return MEMMODEL_SEQ_CST;
5356 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5358 warning (OPT_Winvalid_memory_model,
5359 "invalid memory model argument to builtin");
5360 return MEMMODEL_SEQ_CST;
5363 return (enum memmodel) val;
5366 /* Expand the __atomic_exchange intrinsic:
5367 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5368 EXP is the CALL_EXPR.
5369 TARGET is an optional place for us to store the results. */
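/* Illustrative caller (hypothetical); the type-generic __atomic_exchange_n
   form maps onto this expander:

     int x = 1;
     int old = __atomic_exchange_n (&x, 2, __ATOMIC_ACQ_REL);  // old == 1, x == 2
*/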
5371 static rtx
5372 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5374 rtx val, mem;
5375 enum memmodel model;
5377 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5378 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5380 error ("invalid memory model for %<__atomic_exchange%>");
5381 return NULL_RTX;
5384 if (!flag_inline_atomics)
5385 return NULL_RTX;
5387 /* Expand the operands. */
5388 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5389 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5391 return expand_atomic_exchange (target, mem, val, model);
5394 /* Expand the __atomic_compare_exchange intrinsic:
5395 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5396 TYPE desired, BOOL weak,
5397 enum memmodel success,
5398 enum memmodel failure)
5399 EXP is the CALL_EXPR.
5400 TARGET is an optional place for us to store the results. */
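/* An illustrative compare-exchange loop (hypothetical user code); on
   failure, EXPECTED is updated with the value currently in memory:

     int expected = __atomic_load_n (&v, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (&v, &expected, expected + 1,
                                          1,                  // weak
                                          __ATOMIC_SEQ_CST,   // success
                                          __ATOMIC_RELAXED))  // failure
       ;
*/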
5402 static rtx
5403 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5404 rtx target)
5406 rtx expect, desired, mem, oldval;
5407 rtx_code_label *label;
5408 enum memmodel success, failure;
5409 tree weak;
5410 bool is_weak;
5412 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5413 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5415 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5416 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5418 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5419 return NULL_RTX;
5422 if (failure > success)
5424 error ("failure memory model cannot be stronger than success "
5425 "memory model for %<__atomic_compare_exchange%>");
5426 return NULL_RTX;
5429 if (!flag_inline_atomics)
5430 return NULL_RTX;
5432 /* Expand the operands. */
5433 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5435 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5436 expect = convert_memory_address (Pmode, expect);
5437 expect = gen_rtx_MEM (mode, expect);
5438 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5440 weak = CALL_EXPR_ARG (exp, 3);
5441 is_weak = false;
5442 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5443 is_weak = true;
5445 if (target == const0_rtx)
5446 target = NULL;
5448 /* Lest the rtl backend create a race condition with an improper store
5449 to memory, always create a new pseudo for OLDVAL. */
5450 oldval = NULL;
5452 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5453 is_weak, success, failure))
5454 return NULL_RTX;
5456 /* Conditionally store back to EXPECT, lest we create a race condition
5457 with an improper store to memory. */
5458 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5459 the normal case where EXPECT is totally private, i.e. a register. At
5460 which point the store can be unconditional. */
5461 label = gen_label_rtx ();
5462 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5463 emit_move_insn (expect, oldval);
5464 emit_label (label);
5466 return target;
5469 /* Expand the __atomic_load intrinsic:
5470 TYPE __atomic_load (TYPE *object, enum memmodel)
5471 EXP is the CALL_EXPR.
5472 TARGET is an optional place for us to store the results. */
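/* Illustrative caller (hypothetical); as checked below, RELEASE and
   ACQ_REL orderings are rejected for loads:

     int v = __atomic_load_n (&x, __ATOMIC_ACQUIRE);
*/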
5474 static rtx
5475 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5477 rtx mem;
5478 enum memmodel model;
5480 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5481 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5482 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5484 error ("invalid memory model for %<__atomic_load%>");
5485 return NULL_RTX;
5488 if (!flag_inline_atomics)
5489 return NULL_RTX;
5491 /* Expand the operand. */
5492 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5494 return expand_atomic_load (target, mem, model);
5498 /* Expand the __atomic_store intrinsic:
5499 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5500 EXP is the CALL_EXPR.
5501 TARGET is an optional place for us to store the results. */
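/* Illustrative caller (hypothetical); as checked below, only RELAXED,
   RELEASE and SEQ_CST orderings are valid for stores:

     __atomic_store_n (&x, 1, __ATOMIC_RELEASE);
*/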
5503 static rtx
5504 expand_builtin_atomic_store (machine_mode mode, tree exp)
5506 rtx mem, val;
5507 enum memmodel model;
5509 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5510 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5511 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5512 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5514 error ("invalid memory model for %<__atomic_store%>");
5515 return NULL_RTX;
5518 if (!flag_inline_atomics)
5519 return NULL_RTX;
5521 /* Expand the operands. */
5522 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5523 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5525 return expand_atomic_store (mem, val, model, false);
5528 /* Expand the __atomic_fetch_XXX intrinsic:
5529 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5530 EXP is the CALL_EXPR.
5531 TARGET is an optional place for us to store the results.
5532 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (NAND).
5533 FETCH_AFTER is true if returning the result of the operation.
5534 FETCH_AFTER is false if returning the value before the operation.
5535 IGNORE is true if the result is not used.
5536 EXT_CALL is the correct builtin for an external call if this cannot be
5537 resolved to an instruction sequence. */
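/* For illustration (hypothetical caller code), the two flavors that reach
   this expander with FETCH_AFTER false and true respectively:

     int before = __atomic_fetch_add (&n, 1, __ATOMIC_SEQ_CST);  // value before
     int after  = __atomic_add_fetch (&n, 1, __ATOMIC_SEQ_CST);  // value after
*/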
5539 static rtx
5540 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5541 enum rtx_code code, bool fetch_after,
5542 bool ignore, enum built_in_function ext_call)
5544 rtx val, mem, ret;
5545 enum memmodel model;
5546 tree fndecl;
5547 tree addr;
5549 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5551 /* Expand the operands. */
5552 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5553 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5555 /* Only try generating instructions if inlining is turned on. */
5556 if (flag_inline_atomics)
5558 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5559 if (ret)
5560 return ret;
5563 /* Return if a different routine isn't needed for the library call. */
5564 if (ext_call == BUILT_IN_NONE)
5565 return NULL_RTX;
5567 /* Change the call to the specified function. */
5568 fndecl = get_callee_fndecl (exp);
5569 addr = CALL_EXPR_FN (exp);
5570 STRIP_NOPS (addr);
5572 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5573 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5575 /* Expand the call here so we can emit trailing code. */
5576 ret = expand_call (exp, target, ignore);
5578 /* Replace the original function just in case it matters. */
5579 TREE_OPERAND (addr, 0) = fndecl;
5581 /* Then issue the arithmetic correction to return the right result. */
5582 if (!ignore)
5584 if (code == NOT)
5586 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5587 OPTAB_LIB_WIDEN);
5588 ret = expand_simple_unop (mode, NOT, ret, target, true);
5590 else
5591 ret = expand_simple_binop (mode, code, ret, val, target, true,
5592 OPTAB_LIB_WIDEN);
5594 return ret;
5598 #ifndef HAVE_atomic_clear
5599 # define HAVE_atomic_clear 0
5600 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5601 #endif
5603 /* Expand an atomic clear operation.
5604 void __atomic_clear (BOOL *obj, enum memmodel)
5605 EXP is the call expression. */
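/* Illustrative caller (hypothetical), typically releasing a flag set by
   __atomic_test_and_set:

     __atomic_clear (&flag, __ATOMIC_RELEASE);   // atomically stores false
*/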
5607 static rtx
5608 expand_builtin_atomic_clear (tree exp)
5610 machine_mode mode;
5611 rtx mem, ret;
5612 enum memmodel model;
5614 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5615 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5616 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5618 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5619 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5621 error ("invalid memory model for %<__atomic_store%>");
5622 return const0_rtx;
5625 if (HAVE_atomic_clear)
5627 emit_insn (gen_atomic_clear (mem, model));
5628 return const0_rtx;
5631 /* Try issuing an __atomic_store, allowing fallback to __sync_lock_release.
5632 Failing that, issue a plain store. The only way this can
5633 fail is if the bool type is larger than a word size. Unlikely, but
5634 handle it anyway for completeness. Assume a single threaded model since
5635 there is no atomic support in this case, and no barriers are required. */
5636 ret = expand_atomic_store (mem, const0_rtx, model, true);
5637 if (!ret)
5638 emit_move_insn (mem, const0_rtx);
5639 return const0_rtx;
5642 /* Expand an atomic test_and_set operation.
5643 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5644 EXP is the call expression. */
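/* Illustrative acquire loop (hypothetical user code); the builtin returns
   the previous value, so a true result means the flag was already set:

     while (__atomic_test_and_set (&flag, __ATOMIC_ACQUIRE))
       ;                                         // spin
*/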
5646 static rtx
5647 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5649 rtx mem;
5650 enum memmodel model;
5651 machine_mode mode;
5653 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5654 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5655 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5657 return expand_atomic_test_and_set (target, mem, model);
5661 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5662 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5664 static tree
5665 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5667 int size;
5668 machine_mode mode;
5669 unsigned int mode_align, type_align;
5671 if (TREE_CODE (arg0) != INTEGER_CST)
5672 return NULL_TREE;
5674 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5675 mode = mode_for_size (size, MODE_INT, 0);
5676 mode_align = GET_MODE_ALIGNMENT (mode);
5678 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5679 type_align = mode_align;
5680 else
5682 tree ttype = TREE_TYPE (arg1);
5684 /* This function is usually invoked and folded immediately by the front
5685 end before anything else has a chance to look at it. The pointer
5686 parameter at this point is usually cast to a void *, so check for that
5687 and look past the cast. */
5688 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5689 && VOID_TYPE_P (TREE_TYPE (ttype)))
5690 arg1 = TREE_OPERAND (arg1, 0);
5692 ttype = TREE_TYPE (arg1);
5693 gcc_assert (POINTER_TYPE_P (ttype));
5695 /* Get the underlying type of the object. */
5696 ttype = TREE_TYPE (ttype);
5697 type_align = TYPE_ALIGN (ttype);
5700 /* If the object has smaller alignment, the lock free routines cannot
5701 be used. */
5702 if (type_align < mode_align)
5703 return boolean_false_node;
5705 /* Check if a compare_and_swap pattern exists for the mode which represents
5706 the required size. The pattern is not allowed to fail, so the existence
5707 of the pattern indicates support is present. */
5708 if (can_compare_and_swap_p (mode, true))
5709 return boolean_true_node;
5710 else
5711 return boolean_false_node;
5714 /* Return true if the parameters to call EXP represent an object which will
5715 always generate lock free instructions. The first argument represents the
5716 size of the object, and the second parameter is a pointer to the object
5717 itself. If NULL is passed for the object, then the result is based on
5718 typical alignment for an object of the specified size. Otherwise return
5719 false. */
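/* Illustrative use (hypothetical): the result folds to a compile-time
   constant, so it is convenient for always-taken or never-taken branches:

     if (__atomic_always_lock_free (sizeof (long), 0))
       ...                                       // lock-free fast path
*/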
5721 static rtx
5722 expand_builtin_atomic_always_lock_free (tree exp)
5724 tree size;
5725 tree arg0 = CALL_EXPR_ARG (exp, 0);
5726 tree arg1 = CALL_EXPR_ARG (exp, 1);
5728 if (TREE_CODE (arg0) != INTEGER_CST)
5730 error ("non-constant argument 1 to __atomic_always_lock_free");
5731 return const0_rtx;
5734 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5735 if (size == boolean_true_node)
5736 return const1_rtx;
5737 return const0_rtx;
5740 /* Return one or zero if it can be determined that object ARG1 of size ARG0
5741 is lock free on this architecture. */
5743 static tree
5744 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5746 if (!flag_inline_atomics)
5747 return NULL_TREE;
5749 /* If it isn't always lock free, don't generate a result. */
5750 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5751 return boolean_true_node;
5753 return NULL_TREE;
5756 /* Return true if the parameters to call EXP represent an object which will
5757 always generate lock free instructions. The first argument represents the
5758 size of the object, and the second parameter is a pointer to the object
5759 itself. If NULL is passed for the object, then the result is based on
5760 typical alignment for an object of the specified size. Otherwise return
5761 NULL. */
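/* Unlike __atomic_always_lock_free, this form may fall back to a run-time
   query (a libatomic call) when the answer is unknown at compile time,
   e.g. (hypothetical):

     int b = __atomic_is_lock_free (sizeof (long double), 0);
*/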
5763 static rtx
5764 expand_builtin_atomic_is_lock_free (tree exp)
5766 tree size;
5767 tree arg0 = CALL_EXPR_ARG (exp, 0);
5768 tree arg1 = CALL_EXPR_ARG (exp, 1);
5770 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5772 error ("non-integer argument 1 to __atomic_is_lock_free");
5773 return NULL_RTX;
5776 if (!flag_inline_atomics)
5777 return NULL_RTX;
5779 /* If the value is known at compile time, return the RTX for it. */
5780 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5781 if (size == boolean_true_node)
5782 return const1_rtx;
5784 return NULL_RTX;
5787 /* Expand the __atomic_thread_fence intrinsic:
5788 void __atomic_thread_fence (enum memmodel)
5789 EXP is the CALL_EXPR. */
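/* Illustrative caller (hypothetical):

     __atomic_thread_fence (__ATOMIC_SEQ_CST);   // full inter-thread fence
*/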
5791 static void
5792 expand_builtin_atomic_thread_fence (tree exp)
5794 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5795 expand_mem_thread_fence (model);
5798 /* Expand the __atomic_signal_fence intrinsic:
5799 void __atomic_signal_fence (enum memmodel)
5800 EXP is the CALL_EXPR. */
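/* Illustrative caller (hypothetical); this orders memory only with respect
   to a signal handler on the same thread, so no hardware fence is emitted:

     __atomic_signal_fence (__ATOMIC_SEQ_CST);
*/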
5802 static void
5803 expand_builtin_atomic_signal_fence (tree exp)
5805 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5806 expand_mem_signal_fence (model);
5809 /* Expand the __sync_synchronize intrinsic. */
5811 static void
5812 expand_builtin_sync_synchronize (void)
5814 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5817 static rtx
5818 expand_builtin_thread_pointer (tree exp, rtx target)
5820 enum insn_code icode;
5821 if (!validate_arglist (exp, VOID_TYPE))
5822 return const0_rtx;
5823 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5824 if (icode != CODE_FOR_nothing)
5826 struct expand_operand op;
5827 /* If the target is not suitable then create a new one. */
5828 if (target == NULL_RTX
5829 || !REG_P (target)
5830 || GET_MODE (target) != Pmode)
5831 target = gen_reg_rtx (Pmode);
5832 create_output_operand (&op, target, Pmode);
5833 expand_insn (icode, 1, &op);
5834 return target;
5836 error ("%<__builtin_thread_pointer%> is not supported on this target");
5837 return const0_rtx;
5840 static void
5841 expand_builtin_set_thread_pointer (tree exp)
5843 enum insn_code icode;
5844 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5845 return;
5846 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5847 if (icode != CODE_FOR_nothing)
5849 struct expand_operand op;
5850 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5851 Pmode, EXPAND_NORMAL);
5852 create_input_operand (&op, val, Pmode);
5853 expand_insn (icode, 1, &op);
5854 return;
5856 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
5860 /* Emit code to restore the stack pointer from a previously saved value. */
5862 static void
5863 expand_stack_restore (tree var)
5865 rtx_insn *prev;
5866 rtx sa = expand_normal (var);
5868 sa = convert_memory_address (Pmode, sa);
5870 prev = get_last_insn ();
5871 emit_stack_restore (SAVE_BLOCK, sa);
5872 fixup_args_size_notes (prev, get_last_insn (), 0);
5876 /* Emit code to save the current value of the stack pointer. */
5878 static rtx
5879 expand_stack_save (void)
5881 rtx ret = NULL_RTX;
5883 do_pending_stack_adjust ();
5884 emit_stack_save (SAVE_BLOCK, &ret);
5885 return ret;
5888 /* Expand an expression EXP that calls a built-in function,
5889 with result going to TARGET if that's convenient
5890 (and in mode MODE if that's convenient).
5891 SUBTARGET may be used as the target for computing one of EXP's operands.
5892 IGNORE is nonzero if the value is to be ignored. */
5895 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5896 int ignore)
5898 tree fndecl = get_callee_fndecl (exp);
5899 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5900 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5901 int flags;
5903 /* When ASan is enabled, we don't want to expand some memory/string
5904 builtins and rely on libsanitizer's hooks. This allows us to avoid
5905 redundant checks and be sure, that possible overflow will be detected
5906 by ASan. */
5908 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5909 return expand_call (exp, target, ignore);
5911 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5912 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5914 /* When not optimizing, generate calls to library functions for a certain
5915 set of builtins. */
5916 if (!optimize
5917 && !called_as_built_in (fndecl)
5918 && fcode != BUILT_IN_FORK
5919 && fcode != BUILT_IN_EXECL
5920 && fcode != BUILT_IN_EXECV
5921 && fcode != BUILT_IN_EXECLP
5922 && fcode != BUILT_IN_EXECLE
5923 && fcode != BUILT_IN_EXECVP
5924 && fcode != BUILT_IN_EXECVE
5925 && fcode != BUILT_IN_ALLOCA
5926 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5927 && fcode != BUILT_IN_FREE
5928 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5929 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5930 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5931 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5932 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5933 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5934 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5935 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5936 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5937 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5938 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5939 && fcode != BUILT_IN_CHKP_BNDRET)
5940 return expand_call (exp, target, ignore);
5942 /* The built-in function expanders test for target == const0_rtx
5943 to determine whether the function's result will be ignored. */
5944 if (ignore)
5945 target = const0_rtx;
5947 /* If the result of a pure or const built-in function is ignored, and
5948 none of its arguments are volatile, we can avoid expanding the
5949 built-in call and just evaluate the arguments for side-effects. */
5950 if (target == const0_rtx
5951 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5952 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5954 bool volatilep = false;
5955 tree arg;
5956 call_expr_arg_iterator iter;
5958 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5959 if (TREE_THIS_VOLATILE (arg))
5961 volatilep = true;
5962 break;
5965 if (! volatilep)
5967 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5968 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5969 return const0_rtx;
5973 /* expand_builtin_with_bounds is supposed to be used for
5974 instrumented builtin calls. */
5975 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5977 switch (fcode)
5979 CASE_FLT_FN (BUILT_IN_FABS):
5980 case BUILT_IN_FABSD32:
5981 case BUILT_IN_FABSD64:
5982 case BUILT_IN_FABSD128:
5983 target = expand_builtin_fabs (exp, target, subtarget);
5984 if (target)
5985 return target;
5986 break;
5988 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5989 target = expand_builtin_copysign (exp, target, subtarget);
5990 if (target)
5991 return target;
5992 break;
5994 /* Just do a normal library call if we were unable to fold
5995 the values. */
5996 CASE_FLT_FN (BUILT_IN_CABS):
5997 break;
5999 CASE_FLT_FN (BUILT_IN_EXP):
6000 CASE_FLT_FN (BUILT_IN_EXP10):
6001 CASE_FLT_FN (BUILT_IN_POW10):
6002 CASE_FLT_FN (BUILT_IN_EXP2):
6003 CASE_FLT_FN (BUILT_IN_EXPM1):
6004 CASE_FLT_FN (BUILT_IN_LOGB):
6005 CASE_FLT_FN (BUILT_IN_LOG):
6006 CASE_FLT_FN (BUILT_IN_LOG10):
6007 CASE_FLT_FN (BUILT_IN_LOG2):
6008 CASE_FLT_FN (BUILT_IN_LOG1P):
6009 CASE_FLT_FN (BUILT_IN_TAN):
6010 CASE_FLT_FN (BUILT_IN_ASIN):
6011 CASE_FLT_FN (BUILT_IN_ACOS):
6012 CASE_FLT_FN (BUILT_IN_ATAN):
6013 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6014 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6015 because of possible accuracy problems. */
6016 if (! flag_unsafe_math_optimizations)
6017 break;
6018 CASE_FLT_FN (BUILT_IN_SQRT):
6019 CASE_FLT_FN (BUILT_IN_FLOOR):
6020 CASE_FLT_FN (BUILT_IN_CEIL):
6021 CASE_FLT_FN (BUILT_IN_TRUNC):
6022 CASE_FLT_FN (BUILT_IN_ROUND):
6023 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6024 CASE_FLT_FN (BUILT_IN_RINT):
6025 target = expand_builtin_mathfn (exp, target, subtarget);
6026 if (target)
6027 return target;
6028 break;
6030 CASE_FLT_FN (BUILT_IN_FMA):
6031 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6032 if (target)
6033 return target;
6034 break;
6036 CASE_FLT_FN (BUILT_IN_ILOGB):
6037 if (! flag_unsafe_math_optimizations)
6038 break;
6039 CASE_FLT_FN (BUILT_IN_ISINF):
6040 CASE_FLT_FN (BUILT_IN_FINITE):
6041 case BUILT_IN_ISFINITE:
6042 case BUILT_IN_ISNORMAL:
6043 target = expand_builtin_interclass_mathfn (exp, target);
6044 if (target)
6045 return target;
6046 break;
6048 CASE_FLT_FN (BUILT_IN_ICEIL):
6049 CASE_FLT_FN (BUILT_IN_LCEIL):
6050 CASE_FLT_FN (BUILT_IN_LLCEIL):
6051 CASE_FLT_FN (BUILT_IN_LFLOOR):
6052 CASE_FLT_FN (BUILT_IN_IFLOOR):
6053 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6054 target = expand_builtin_int_roundingfn (exp, target);
6055 if (target)
6056 return target;
6057 break;
6059 CASE_FLT_FN (BUILT_IN_IRINT):
6060 CASE_FLT_FN (BUILT_IN_LRINT):
6061 CASE_FLT_FN (BUILT_IN_LLRINT):
6062 CASE_FLT_FN (BUILT_IN_IROUND):
6063 CASE_FLT_FN (BUILT_IN_LROUND):
6064 CASE_FLT_FN (BUILT_IN_LLROUND):
6065 target = expand_builtin_int_roundingfn_2 (exp, target);
6066 if (target)
6067 return target;
6068 break;
6070 CASE_FLT_FN (BUILT_IN_POWI):
6071 target = expand_builtin_powi (exp, target);
6072 if (target)
6073 return target;
6074 break;
6076 CASE_FLT_FN (BUILT_IN_ATAN2):
6077 CASE_FLT_FN (BUILT_IN_LDEXP):
6078 CASE_FLT_FN (BUILT_IN_SCALB):
6079 CASE_FLT_FN (BUILT_IN_SCALBN):
6080 CASE_FLT_FN (BUILT_IN_SCALBLN):
6081 if (! flag_unsafe_math_optimizations)
6082 break;
6084 CASE_FLT_FN (BUILT_IN_FMOD):
6085 CASE_FLT_FN (BUILT_IN_REMAINDER):
6086 CASE_FLT_FN (BUILT_IN_DREM):
6087 CASE_FLT_FN (BUILT_IN_POW):
6088 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6089 if (target)
6090 return target;
6091 break;
6093 CASE_FLT_FN (BUILT_IN_CEXPI):
6094 target = expand_builtin_cexpi (exp, target);
6095 gcc_assert (target);
6096 return target;
6098 CASE_FLT_FN (BUILT_IN_SIN):
6099 CASE_FLT_FN (BUILT_IN_COS):
6100 if (! flag_unsafe_math_optimizations)
6101 break;
6102 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6103 if (target)
6104 return target;
6105 break;
6107 CASE_FLT_FN (BUILT_IN_SINCOS):
6108 if (! flag_unsafe_math_optimizations)
6109 break;
6110 target = expand_builtin_sincos (exp);
6111 if (target)
6112 return target;
6113 break;
6115 case BUILT_IN_APPLY_ARGS:
6116 return expand_builtin_apply_args ();
6118 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6119 FUNCTION with a copy of the parameters described by
6120 ARGUMENTS, and ARGSIZE. It returns a block of memory
6121 allocated on the stack into which is stored all the registers
6122 that might possibly be used for returning the result of a
6123 function. ARGUMENTS is the value returned by
6124 __builtin_apply_args. ARGSIZE is the number of bytes of
6125 arguments that must be copied. ??? How should this value be
6126 computed? We'll also need a safe worst case value for varargs
6127 functions. */
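/* An illustrative forwarding wrapper (hypothetical; the ARGSIZE of 64 is
   an assumed worst case, since, as noted above, there is no portable way
   to compute it):

     void target_function ();
     void wrapper ()
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*)()) target_function, args, 64);
       __builtin_return (ret);
     }
*/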
6128 case BUILT_IN_APPLY:
6129 if (!validate_arglist (exp, POINTER_TYPE,
6130 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6131 && !validate_arglist (exp, REFERENCE_TYPE,
6132 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6133 return const0_rtx;
6134 else
6136 rtx ops[3];
6138 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6139 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6140 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6142 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6145 /* __builtin_return (RESULT) causes the function to return the
6146 value described by RESULT. RESULT is address of the block of
6147 memory returned by __builtin_apply. */
6148 case BUILT_IN_RETURN:
6149 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6150 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6151 return const0_rtx;
6153 case BUILT_IN_SAVEREGS:
6154 return expand_builtin_saveregs ();
6156 case BUILT_IN_VA_ARG_PACK:
6157 /* All valid uses of __builtin_va_arg_pack () are removed during
6158 inlining. */
6159 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6160 return const0_rtx;
6162 case BUILT_IN_VA_ARG_PACK_LEN:
6163 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6164 inlining. */
6165 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6166 return const0_rtx;
6168 /* Return the address of the first anonymous stack arg. */
6169 case BUILT_IN_NEXT_ARG:
6170 if (fold_builtin_next_arg (exp, false))
6171 return const0_rtx;
6172 return expand_builtin_next_arg ();
6174 case BUILT_IN_CLEAR_CACHE:
6175 target = expand_builtin___clear_cache (exp);
6176 if (target)
6177 return target;
6178 break;
6180 case BUILT_IN_CLASSIFY_TYPE:
6181 return expand_builtin_classify_type (exp);
6183 case BUILT_IN_CONSTANT_P:
6184 return const0_rtx;
6186 case BUILT_IN_FRAME_ADDRESS:
6187 case BUILT_IN_RETURN_ADDRESS:
6188 return expand_builtin_frame_address (fndecl, exp);
6190 /* Returns the address of the area where the structure is returned.
6191 0 otherwise. */
6192 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6193 if (call_expr_nargs (exp) != 0
6194 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6195 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6196 return const0_rtx;
6197 else
6198 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6200 case BUILT_IN_ALLOCA:
6201 case BUILT_IN_ALLOCA_WITH_ALIGN:
6202 /* If the allocation stems from the declaration of a variable-sized
6203 object, it cannot accumulate. */
6204 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6205 if (target)
6206 return target;
6207 break;
6209 case BUILT_IN_STACK_SAVE:
6210 return expand_stack_save ();
6212 case BUILT_IN_STACK_RESTORE:
6213 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6214 return const0_rtx;
6216 case BUILT_IN_BSWAP16:
6217 case BUILT_IN_BSWAP32:
6218 case BUILT_IN_BSWAP64:
6219 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6220 if (target)
6221 return target;
6222 break;
6224 CASE_INT_FN (BUILT_IN_FFS):
6225 target = expand_builtin_unop (target_mode, exp, target,
6226 subtarget, ffs_optab);
6227 if (target)
6228 return target;
6229 break;
6231 CASE_INT_FN (BUILT_IN_CLZ):
6232 target = expand_builtin_unop (target_mode, exp, target,
6233 subtarget, clz_optab);
6234 if (target)
6235 return target;
6236 break;
6238 CASE_INT_FN (BUILT_IN_CTZ):
6239 target = expand_builtin_unop (target_mode, exp, target,
6240 subtarget, ctz_optab);
6241 if (target)
6242 return target;
6243 break;
6245 CASE_INT_FN (BUILT_IN_CLRSB):
6246 target = expand_builtin_unop (target_mode, exp, target,
6247 subtarget, clrsb_optab);
6248 if (target)
6249 return target;
6250 break;
6252 CASE_INT_FN (BUILT_IN_POPCOUNT):
6253 target = expand_builtin_unop (target_mode, exp, target,
6254 subtarget, popcount_optab);
6255 if (target)
6256 return target;
6257 break;
6259 CASE_INT_FN (BUILT_IN_PARITY):
6260 target = expand_builtin_unop (target_mode, exp, target,
6261 subtarget, parity_optab);
6262 if (target)
6263 return target;
6264 break;
6266 case BUILT_IN_STRLEN:
6267 target = expand_builtin_strlen (exp, target, target_mode);
6268 if (target)
6269 return target;
6270 break;
6272 case BUILT_IN_STRCPY:
6273 target = expand_builtin_strcpy (exp, target);
6274 if (target)
6275 return target;
6276 break;
6278 case BUILT_IN_STRNCPY:
6279 target = expand_builtin_strncpy (exp, target);
6280 if (target)
6281 return target;
6282 break;
6284 case BUILT_IN_STPCPY:
6285 target = expand_builtin_stpcpy (exp, target, mode);
6286 if (target)
6287 return target;
6288 break;
6290 case BUILT_IN_MEMCPY:
6291 target = expand_builtin_memcpy (exp, target);
6292 if (target)
6293 return target;
6294 break;
6296 case BUILT_IN_MEMPCPY:
6297 target = expand_builtin_mempcpy (exp, target, mode);
6298 if (target)
6299 return target;
6300 break;
6302 case BUILT_IN_MEMSET:
6303 target = expand_builtin_memset (exp, target, mode);
6304 if (target)
6305 return target;
6306 break;
6308 case BUILT_IN_BZERO:
6309 target = expand_builtin_bzero (exp);
6310 if (target)
6311 return target;
6312 break;
6314 case BUILT_IN_STRCMP:
6315 target = expand_builtin_strcmp (exp, target);
6316 if (target)
6317 return target;
6318 break;
6320 case BUILT_IN_STRNCMP:
6321 target = expand_builtin_strncmp (exp, target, mode);
6322 if (target)
6323 return target;
6324 break;
6326 case BUILT_IN_BCMP:
6327 case BUILT_IN_MEMCMP:
6328 target = expand_builtin_memcmp (exp, target, mode);
6329 if (target)
6330 return target;
6331 break;
6333 case BUILT_IN_SETJMP:
6334 /* This should have been lowered to the builtins below. */
6335 gcc_unreachable ();
6337 case BUILT_IN_SETJMP_SETUP:
6338 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6339 and the receiver label. */
6340 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6342 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6343 VOIDmode, EXPAND_NORMAL);
6344 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6345 rtx label_r = label_rtx (label);
6347 /* This is copied from the handling of non-local gotos. */
6348 expand_builtin_setjmp_setup (buf_addr, label_r);
6349 nonlocal_goto_handler_labels
6350 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6351 nonlocal_goto_handler_labels);
6352 /* ??? Do not let expand_label treat us as such since we would
6353 not want to be both on the list of non-local labels and on
6354 the list of forced labels. */
6355 FORCED_LABEL (label) = 0;
6356 return const0_rtx;
6358 break;
6360 case BUILT_IN_SETJMP_RECEIVER:
6361 /* __builtin_setjmp_receiver is passed the receiver label. */
6362 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6364 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6365 rtx label_r = label_rtx (label);
6367 expand_builtin_setjmp_receiver (label_r);
6368 return const0_rtx;
6370 break;
6372 /* __builtin_longjmp is passed a pointer to an array of five words.
6373 It's similar to the C library longjmp function but works with
6374 __builtin_setjmp above. */
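/* Illustrative pairing (hypothetical user code); as enforced below, the
   second argument must be the constant 1:

     static void *jmp_buf_[5];
     if (__builtin_setjmp (jmp_buf_) == 0)
       __builtin_longjmp (jmp_buf_, 1);   // setjmp then returns nonzero
*/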
6375 case BUILT_IN_LONGJMP:
6376 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6378 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6379 VOIDmode, EXPAND_NORMAL);
6380 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6382 if (value != const1_rtx)
6384 error ("%<__builtin_longjmp%> second argument must be 1");
6385 return const0_rtx;
6388 expand_builtin_longjmp (buf_addr, value);
6389 return const0_rtx;
6391 break;
6393 case BUILT_IN_NONLOCAL_GOTO:
6394 target = expand_builtin_nonlocal_goto (exp);
6395 if (target)
6396 return target;
6397 break;
6399 /* This updates the setjmp buffer that is its argument with the value
6400 of the current stack pointer. */
6401 case BUILT_IN_UPDATE_SETJMP_BUF:
6402 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6404 rtx buf_addr
6405 = expand_normal (CALL_EXPR_ARG (exp, 0));
6407 expand_builtin_update_setjmp_buf (buf_addr);
6408 return const0_rtx;
6410 break;
6412 case BUILT_IN_TRAP:
6413 expand_builtin_trap ();
6414 return const0_rtx;
6416 case BUILT_IN_UNREACHABLE:
6417 expand_builtin_unreachable ();
6418 return const0_rtx;
6420 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6421 case BUILT_IN_SIGNBITD32:
6422 case BUILT_IN_SIGNBITD64:
6423 case BUILT_IN_SIGNBITD128:
6424 target = expand_builtin_signbit (exp, target);
6425 if (target)
6426 return target;
6427 break;
6429 /* Various hooks for the DWARF 2 __throw routine. */
6430 case BUILT_IN_UNWIND_INIT:
6431 expand_builtin_unwind_init ();
6432 return const0_rtx;
6433 case BUILT_IN_DWARF_CFA:
6434 return virtual_cfa_rtx;
6435 #ifdef DWARF2_UNWIND_INFO
6436 case BUILT_IN_DWARF_SP_COLUMN:
6437 return expand_builtin_dwarf_sp_column ();
6438 case BUILT_IN_INIT_DWARF_REG_SIZES:
6439 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6440 return const0_rtx;
6441 #endif
6442 case BUILT_IN_FROB_RETURN_ADDR:
6443 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6444 case BUILT_IN_EXTRACT_RETURN_ADDR:
6445 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6446 case BUILT_IN_EH_RETURN:
6447 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6448 CALL_EXPR_ARG (exp, 1));
6449 return const0_rtx;
6450 #ifdef EH_RETURN_DATA_REGNO
6451 case BUILT_IN_EH_RETURN_DATA_REGNO:
6452 return expand_builtin_eh_return_data_regno (exp);
6453 #endif
6454 case BUILT_IN_EXTEND_POINTER:
6455 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6456 case BUILT_IN_EH_POINTER:
6457 return expand_builtin_eh_pointer (exp);
6458 case BUILT_IN_EH_FILTER:
6459 return expand_builtin_eh_filter (exp);
6460 case BUILT_IN_EH_COPY_VALUES:
6461 return expand_builtin_eh_copy_values (exp);
6463 case BUILT_IN_VA_START:
6464 return expand_builtin_va_start (exp);
6465 case BUILT_IN_VA_END:
6466 return expand_builtin_va_end (exp);
6467 case BUILT_IN_VA_COPY:
6468 return expand_builtin_va_copy (exp);
6469 case BUILT_IN_EXPECT:
6470 return expand_builtin_expect (exp, target);
6471 case BUILT_IN_ASSUME_ALIGNED:
6472 return expand_builtin_assume_aligned (exp, target);
6473 case BUILT_IN_PREFETCH:
6474 expand_builtin_prefetch (exp);
6475 return const0_rtx;
6477 case BUILT_IN_INIT_TRAMPOLINE:
6478 return expand_builtin_init_trampoline (exp, true);
6479 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6480 return expand_builtin_init_trampoline (exp, false);
6481 case BUILT_IN_ADJUST_TRAMPOLINE:
6482 return expand_builtin_adjust_trampoline (exp);
6484 case BUILT_IN_FORK:
6485 case BUILT_IN_EXECL:
6486 case BUILT_IN_EXECV:
6487 case BUILT_IN_EXECLP:
6488 case BUILT_IN_EXECLE:
6489 case BUILT_IN_EXECVP:
6490 case BUILT_IN_EXECVE:
6491 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6492 if (target)
6493 return target;
6494 break;
6496 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6497 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6498 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6499 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6500 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6501 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6502 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6503 if (target)
6504 return target;
6505 break;
6507 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6508 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6509 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6510 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6511 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6512 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6513 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6514 if (target)
6515 return target;
6516 break;
6518 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6519 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6520 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6521 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6522 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6523 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6524 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6525 if (target)
6526 return target;
6527 break;
6529 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6530 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6531 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6532 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6533 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6534 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6535 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6536 if (target)
6537 return target;
6538 break;
6540 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6541 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6542 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6543 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6544 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6545 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6546 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6547 if (target)
6548 return target;
6549 break;
6551 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6552 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6553 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6554 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6555 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6556 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6557 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6558 if (target)
6559 return target;
6560 break;
6562 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6563 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6564 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6565 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6566 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6567 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6568 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6569 if (target)
6570 return target;
6571 break;
6573 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6574 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6575 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6576 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6577 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6578 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6579 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6580 if (target)
6581 return target;
6582 break;
6584 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6585 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6586 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6587 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6588 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6589 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6590 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6591 if (target)
6592 return target;
6593 break;
6595 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6596 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6597 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6598 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6599 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6600 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6601 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6602 if (target)
6603 return target;
6604 break;
6606 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6607 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6608 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6609 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6610 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6611 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6612 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6613 if (target)
6614 return target;
6615 break;
6617 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6618 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6619 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6620 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6621 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6622 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6623 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6624 if (target)
6625 return target;
6626 break;
6628 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6629 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6630 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6631 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6632 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6633 if (mode == VOIDmode)
6634 mode = TYPE_MODE (boolean_type_node);
6635 if (!target || !register_operand (target, mode))
6636 target = gen_reg_rtx (mode);
6638 mode = get_builtin_sync_mode
6639 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6640 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6641 if (target)
6642 return target;
6643 break;
6645 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6646 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6647 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6648 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6649 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6650 mode = get_builtin_sync_mode
6651 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6652 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6653 if (target)
6654 return target;
6655 break;
6657 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6658 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6659 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6660 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6661 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6662 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6663 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6664 if (target)
6665 return target;
6666 break;
6668 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6669 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6670 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6671 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6672 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6673 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6674 expand_builtin_sync_lock_release (mode, exp);
6675 return const0_rtx;
6677 case BUILT_IN_SYNC_SYNCHRONIZE:
6678 expand_builtin_sync_synchronize ();
6679 return const0_rtx;
6681 case BUILT_IN_ATOMIC_EXCHANGE_1:
6682 case BUILT_IN_ATOMIC_EXCHANGE_2:
6683 case BUILT_IN_ATOMIC_EXCHANGE_4:
6684 case BUILT_IN_ATOMIC_EXCHANGE_8:
6685 case BUILT_IN_ATOMIC_EXCHANGE_16:
6686 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6687 target = expand_builtin_atomic_exchange (mode, exp, target);
6688 if (target)
6689 return target;
6690 break;
6692 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6693 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6694 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6695 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6696 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6698 unsigned int nargs, z;
6699 vec<tree, va_gc> *vec;
6701 mode =
6702 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6703 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6704 if (target)
6705 return target;
6707 /* If this is turned into an external library call, the weak parameter
6708 must be dropped to match the expected parameter list. */
6709 nargs = call_expr_nargs (exp);
6710 vec_alloc (vec, nargs - 1);
6711 for (z = 0; z < 3; z++)
6712 vec->quick_push (CALL_EXPR_ARG (exp, z));
6713 /* Skip the boolean weak parameter. */
6714 for (z = 4; z < 6; z++)
6715 vec->quick_push (CALL_EXPR_ARG (exp, z));
6716 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6717 break;
6720 case BUILT_IN_ATOMIC_LOAD_1:
6721 case BUILT_IN_ATOMIC_LOAD_2:
6722 case BUILT_IN_ATOMIC_LOAD_4:
6723 case BUILT_IN_ATOMIC_LOAD_8:
6724 case BUILT_IN_ATOMIC_LOAD_16:
6725 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6726 target = expand_builtin_atomic_load (mode, exp, target);
6727 if (target)
6728 return target;
6729 break;
6731 case BUILT_IN_ATOMIC_STORE_1:
6732 case BUILT_IN_ATOMIC_STORE_2:
6733 case BUILT_IN_ATOMIC_STORE_4:
6734 case BUILT_IN_ATOMIC_STORE_8:
6735 case BUILT_IN_ATOMIC_STORE_16:
6736 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6737 target = expand_builtin_atomic_store (mode, exp);
6738 if (target)
6739 return const0_rtx;
6740 break;
6742 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6743 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6744 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6745 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6746 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6748 enum built_in_function lib;
6749 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6750 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6751 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6752 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6753 ignore, lib);
6754 if (target)
6755 return target;
6756 break;
6758 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6759 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6760 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6761 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6762 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6764 enum built_in_function lib;
6765 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6766 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6767 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6768 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6769 ignore, lib);
6770 if (target)
6771 return target;
6772 break;
6774 case BUILT_IN_ATOMIC_AND_FETCH_1:
6775 case BUILT_IN_ATOMIC_AND_FETCH_2:
6776 case BUILT_IN_ATOMIC_AND_FETCH_4:
6777 case BUILT_IN_ATOMIC_AND_FETCH_8:
6778 case BUILT_IN_ATOMIC_AND_FETCH_16:
6780 enum built_in_function lib;
6781 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6782 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6783 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6784 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6785 ignore, lib);
6786 if (target)
6787 return target;
6788 break;
6790 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6791 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6792 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6793 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6794 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6796 enum built_in_function lib;
6797 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6798 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6799 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6800 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6801 ignore, lib);
6802 if (target)
6803 return target;
6804 break;
6806 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6807 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6808 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6809 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6810 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6812 enum built_in_function lib;
6813 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6814 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6815 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6816 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6817 ignore, lib);
6818 if (target)
6819 return target;
6820 break;
6822 case BUILT_IN_ATOMIC_OR_FETCH_1:
6823 case BUILT_IN_ATOMIC_OR_FETCH_2:
6824 case BUILT_IN_ATOMIC_OR_FETCH_4:
6825 case BUILT_IN_ATOMIC_OR_FETCH_8:
6826 case BUILT_IN_ATOMIC_OR_FETCH_16:
6828 enum built_in_function lib;
6829 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6830 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6831 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6832 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6833 ignore, lib);
6834 if (target)
6835 return target;
6836 break;
6838 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6839 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6840 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6841 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6842 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6843 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6844 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6845 ignore, BUILT_IN_NONE);
6846 if (target)
6847 return target;
6848 break;
6850 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6851 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6852 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6853 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6854 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6855 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6856 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6857 ignore, BUILT_IN_NONE);
6858 if (target)
6859 return target;
6860 break;
6862 case BUILT_IN_ATOMIC_FETCH_AND_1:
6863 case BUILT_IN_ATOMIC_FETCH_AND_2:
6864 case BUILT_IN_ATOMIC_FETCH_AND_4:
6865 case BUILT_IN_ATOMIC_FETCH_AND_8:
6866 case BUILT_IN_ATOMIC_FETCH_AND_16:
6867 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6868 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6869 ignore, BUILT_IN_NONE);
6870 if (target)
6871 return target;
6872 break;
6874 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6875 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6876 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6877 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6878 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6879 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6880 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6881 ignore, BUILT_IN_NONE);
6882 if (target)
6883 return target;
6884 break;
6886 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6887 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6888 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6889 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6890 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6891 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6892 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6893 ignore, BUILT_IN_NONE);
6894 if (target)
6895 return target;
6896 break;
6898 case BUILT_IN_ATOMIC_FETCH_OR_1:
6899 case BUILT_IN_ATOMIC_FETCH_OR_2:
6900 case BUILT_IN_ATOMIC_FETCH_OR_4:
6901 case BUILT_IN_ATOMIC_FETCH_OR_8:
6902 case BUILT_IN_ATOMIC_FETCH_OR_16:
6903 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6904 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6905 ignore, BUILT_IN_NONE);
6906 if (target)
6907 return target;
6908 break;
6910 case BUILT_IN_ATOMIC_TEST_AND_SET:
6911 return expand_builtin_atomic_test_and_set (exp, target);
6913 case BUILT_IN_ATOMIC_CLEAR:
6914 return expand_builtin_atomic_clear (exp);
6916 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6917 return expand_builtin_atomic_always_lock_free (exp);
6919 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6920 target = expand_builtin_atomic_is_lock_free (exp);
6921 if (target)
6922 return target;
6923 break;
6925 case BUILT_IN_ATOMIC_THREAD_FENCE:
6926 expand_builtin_atomic_thread_fence (exp);
6927 return const0_rtx;
6929 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6930 expand_builtin_atomic_signal_fence (exp);
6931 return const0_rtx;
6933 case BUILT_IN_OBJECT_SIZE:
6934 return expand_builtin_object_size (exp);
6936 case BUILT_IN_MEMCPY_CHK:
6937 case BUILT_IN_MEMPCPY_CHK:
6938 case BUILT_IN_MEMMOVE_CHK:
6939 case BUILT_IN_MEMSET_CHK:
6940 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6941 if (target)
6942 return target;
6943 break;
6945 case BUILT_IN_STRCPY_CHK:
6946 case BUILT_IN_STPCPY_CHK:
6947 case BUILT_IN_STRNCPY_CHK:
6948 case BUILT_IN_STPNCPY_CHK:
6949 case BUILT_IN_STRCAT_CHK:
6950 case BUILT_IN_STRNCAT_CHK:
6951 case BUILT_IN_SNPRINTF_CHK:
6952 case BUILT_IN_VSNPRINTF_CHK:
6953 maybe_emit_chk_warning (exp, fcode);
6954 break;
6956 case BUILT_IN_SPRINTF_CHK:
6957 case BUILT_IN_VSPRINTF_CHK:
6958 maybe_emit_sprintf_chk_warning (exp, fcode);
6959 break;
6961 case BUILT_IN_FREE:
6962 if (warn_free_nonheap_object)
6963 maybe_emit_free_warning (exp);
6964 break;
6966 case BUILT_IN_THREAD_POINTER:
6967 return expand_builtin_thread_pointer (exp, target);
6969 case BUILT_IN_SET_THREAD_POINTER:
6970 expand_builtin_set_thread_pointer (exp);
6971 return const0_rtx;
6973 case BUILT_IN_CILK_DETACH:
6974 expand_builtin_cilk_detach (exp);
6975 return const0_rtx;
6977 case BUILT_IN_CILK_POP_FRAME:
6978 expand_builtin_cilk_pop_frame (exp);
6979 return const0_rtx;
6981 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6982 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6983 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6984 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6985 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6986 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6987 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6988 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6989 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6990 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6991 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6992 /* We allow user CHKP builtins if Pointer Bounds
6993 Checker is off. */
6994 if (!chkp_function_instrumented_p (current_function_decl))
6996 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6997 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6998 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6999 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7000 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7001 return expand_normal (CALL_EXPR_ARG (exp, 0));
7002 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7003 return expand_normal (size_zero_node);
7004 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7005 return expand_normal (size_int (-1));
7006 else
7007 return const0_rtx;
7009 /* FALLTHROUGH */
7011 case BUILT_IN_CHKP_BNDMK:
7012 case BUILT_IN_CHKP_BNDSTX:
7013 case BUILT_IN_CHKP_BNDCL:
7014 case BUILT_IN_CHKP_BNDCU:
7015 case BUILT_IN_CHKP_BNDLDX:
7016 case BUILT_IN_CHKP_BNDRET:
7017 case BUILT_IN_CHKP_INTERSECT:
7018 case BUILT_IN_CHKP_NARROW:
7019 case BUILT_IN_CHKP_EXTRACT_LOWER:
7020 case BUILT_IN_CHKP_EXTRACT_UPPER:
7021 /* A software implementation of Pointer Bounds Checker is not yet
7022 implemented; target support is required. */
7023 error ("your target platform does not support %<-fcheck-pointer-bounds%>");
7024 break;
7026 default: /* just do library call, if unknown builtin */
7027 break;
7030 /* The switch statement above can drop through to cause the function
7031 to be called normally. */
7032 return expand_call (exp, target, ignore);
7035 /* Similar to expand_builtin but is used for instrumented calls. */
7038 expand_builtin_with_bounds (tree exp, rtx target,
7039 rtx subtarget ATTRIBUTE_UNUSED,
7040 machine_mode mode, int ignore)
7042 tree fndecl = get_callee_fndecl (exp);
7043 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7045 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7047 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7048 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7050 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7051 && fcode < END_CHKP_BUILTINS);
7053 switch (fcode)
7055 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7056 target = expand_builtin_memcpy_with_bounds (exp, target);
7057 if (target)
7058 return target;
7059 break;
7061 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7062 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7063 if (target)
7064 return target;
7065 break;
7067 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7068 target = expand_builtin_memset_with_bounds (exp, target, mode);
7069 if (target)
7070 return target;
7071 break;
7073 default:
7074 break;
7077 /* The switch statement above can drop through to cause the function
7078 to be called normally. */
7079 return expand_call (exp, target, ignore);

/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
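
/* For illustration (a hypothetical caller, not part of this file):
   users of the classifier above switch on the returned code rather
   than comparing decl names, e.g.

       switch (builtin_mathfn_code (expr))
	 {
	 CASE_FLT_FN (BUILT_IN_SQRT):
	   ... expr is a well-formed sqrt/sqrtf/sqrtl call ...
	 default:
	   break;
	 }

   Anything that is not such a call simply yields END_BUILTINS.  */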

/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those cases we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
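
/* For illustration (user-level view, not taken from this file):

       __builtin_constant_p (42)      -> folds to 1 here
       __builtin_constant_p ("abc")   -> folds to 1 here
       __builtin_constant_p (n * 2)   -> folds to 0 only once no further
					 optimization can prove it constant

   The NULL_TREE result above is what keeps the last case pending
   until late folding.  */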

/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				   predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}

/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	inner = TREE_OPERAND (inner, 0);
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
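
/* For illustration (user-level view, not taken from this file): given

       if (__builtin_expect (p != 0 && q != 0, 1))

   the distribution above rewrites the condition roughly as

       __builtin_expect (p != 0, 1) && __builtin_expect (q != 0, 1)

   so each operand of the short-circuit operator carries its own
   prediction hint.  */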

/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}

/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
	return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}

/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}

/* Fold a call to __builtin_nan or __builtin_nans with argument ARG.  */

static tree
fold_builtin_nan (tree arg, tree type, int quiet)
{
  REAL_VALUE_TYPE real;
  const char *str;

  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  str = c_getstr (arg);
  if (!str)
    return NULL_TREE;

  if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
    return NULL_TREE;

  return build_real (type, real);
}

/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    CASE_CONVERT:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
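
/* For illustration: given double d and int i, the predicate above
   considers (double) i, floor (d), fmin (floor (d), trunc (d)) and
   the constant 2.0 all integer valued, while d + 0.5 is not provably
   so and yields false.  */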

/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f)).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
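
/* For illustration (assuming float f and optimization enabled): the
   narrowing above rewrites

       floor ((double) f)   ->   (double) floorf (f)

   which is safe because a float widened to double rounds back
   exactly through the float-precision floor.  */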

/* FNDECL is assumed to be a builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_ICEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_IFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_IROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_IRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
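
/* For illustration: on an LP64 target, where long and long long have
   the same precision, the canonicalization above turns

       llround (x)   ->   lround (x)

   and on ILP32, iround (x) -> lround (x), so later passes only ever
   see the 'l' variants.  */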

/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  STRIP_NOPS (real);
	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
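
/* For illustration, in user terms (unsafe-math cases marked):

       cabs (x + 0i)  ->  fabs (x)
       cabs (-z)      ->  cabs (z)
       cabs (x + xi)  ->  fabs (x) * sqrt (2)   (unsafe-math only)

   and otherwise, when optimizing for speed with unsafe math, the call
   expands to sqrt (r*r + i*i).  */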

/* Build a complex (inf +- 0i) for the result of cproj.  TYPE is the
   complex tree type of the result.  If NEG is true, the imaginary
   zero is negative.  */

static tree
build_complex_cproj (tree type, bool neg)
{
  REAL_VALUE_TYPE rinf, rzero = dconst0;

  real_inf (&rinf);
  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), rinf),
			build_real (TREE_TYPE (type), rzero));
}

/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}

/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
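
/* For illustration (all under -funsafe-math-optimizations):

       sqrt (exp (x))     ->  exp (x * 0.5)
       sqrt (sqrt (x))    ->  pow (x, 0.25)
       sqrt (pow (x, y))  ->  pow (fabs (x), y * 0.5)

   The fabs in the last form keeps a negative x from turning a NaN
   result into a finite one.  */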

/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type),
						    dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }

  return NULL_TREE;
}

/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
   TYPE is the type of the return value.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cos (location_t loc,
		  tree arg, tree type, tree fndecl)
{
  tree res, narg;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
    return res;

  /* Optimize cos(-x) into cos (x).  */
  if ((narg = fold_strip_sign_ops (arg)))
    return build_call_expr_loc (loc, fndecl, 1, narg);

  return NULL_TREE;
}

/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree res, narg;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
	return res;

      /* Optimize cosh(-x) into cosh (x).  */
      if ((narg = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, narg);
    }

  return NULL_TREE;
}

/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
   argument ARG.  TYPE is the type of the return value.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
		   bool hyper)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree tmp;

      /* Calculate the result when the argument is a constant.  */
      if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
	return tmp;

      /* Optimize fn(-x) into fn(x).  */
      if ((tmp = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, tmp);
    }

  return NULL_TREE;
}

/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_tan (tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
    return res;

  /* Optimize tan(atan(x)) = x.  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_ATAN
	  || fcode == BUILT_IN_ATANF
	  || fcode == BUILT_IN_ATANL))
    return CALL_EXPR_ARG (arg, 0);

  return NULL_TREE;
}

/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
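
/* For illustration: on targets whose libc provides the C99 complex
   functions, the canonicalization above rewrites

       sincos (x, &s, &c);

   into roughly

       __complex__ double tmp = cexpi (x);
       s = __imag__ tmp;
       c = __real__ tmp;

   since cexpi (x) computes cos (x) + i*sin (x) in one call.  */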

/* Fold function call to builtin cexp, cexpf, or cexpl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc,
								REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc,
								IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}

/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize trunc of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE r, x;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      x = TREE_REAL_CST (arg);
      real_trunc (&r, TYPE_MODE (type), &x);
      return build_real (type, r);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin floor, floorf or floorl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_floor (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize floor of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_floor (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  /* Fold floor (x) where x is nonnegative to trunc (x).  */
  if (tree_expr_nonnegative_p (arg))
    {
      tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
      if (truncfn)
	return build_call_expr_loc (loc, truncfn, 1, arg);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize ceil of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_ceil (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin round, roundf or roundl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_round (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize round of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_round (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  REAL_VALUE_TYPE r;
	  bool fail = false;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_IFLOOR):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_ICEIL):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_IROUND):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
	  if (!fail)
	    return wide_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
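
/* For illustration: lround (2.5) folds to the constant 3 here
   (real_round rounds halfway cases away from zero, then
   real_to_integer converts), while something like lround (1e30) does
   not fold because the value cannot be represented in the result
   type and FAIL is set.  */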

/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (arg);
      int result;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  result = wi::ffs (arg);
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  if (wi::ne_p (arg, 0))
	    result = wi::clz (arg);
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  if (wi::ne_p (arg, 0))
	    result = wi::ctz (arg);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CLRSB):
	  result = wi::clrsb (arg);
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  result = wi::popcount (arg);
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  result = wi::parity (arg);
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
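
/* For illustration, with constant arguments the folds above give

       __builtin_popcount (0xff)  ->  8
       __builtin_parity (7)       ->  1
       __builtin_ffs (8)          ->  4

   while __builtin_clz (0) folds only when the target defines a value
   through CLZ_DEFINED_VALUE_AT_ZERO.  */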

/* Fold function call to builtin_bswap and the short, long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP16:
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    signop sgn = TYPE_SIGN (type);
	    tree result =
	      wide_int_to_tree (type,
				wide_int::from (arg, TYPE_PRECISION (type),
						sgn).bswap ());
	    return result;
	  }
	default:
	  gcc_unreachable ();
	}
    }

  return NULL_TREE;
}

/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR logarithm function.  */

static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent)) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, SIGNED);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
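
/* For illustration (all under -funsafe-math-optimizations):

       log (exp (x))     ->  x
       log (sqrt (x))    ->  0.5 * log (x)
       log (pow (x, y))  ->  y * log (x)

   each an instance of the logN(x**exponent) -> exponent*logN(x)
   rewrite described above.  */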

/* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (location_t loc, tree fndecl,
		    tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    {
      return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
				  narg1 ? narg1 : arg1);
    }

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
	= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2_loc (loc, MULT_EXPR, type,
			      fold_build1_loc (loc, ABS_EXPR, type, arg0),
			      build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}

/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, SIGNED);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
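
/* For illustration (unsafe-math cases marked):

       pow (x, 0.0)   ->  1.0
       pow (x, 1.0)   ->  x
       pow (x, -1.0)  ->  1.0 / x
       pow (x, 0.5)   ->  sqrt (x)              (unsafe-math only)
       pow (pow (x, y), z)  ->  pow (x, y*z)    iff x is nonnegative.  */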

/* Fold a builtin function call to powi, powif, or powil with argument ARG.
   Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
		   tree arg0, tree arg1, tree type)
{
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (tree_fits_shwi_p (arg1))
    {
      HOST_WIDE_INT c = tree_to_shwi (arg1);

      /* Evaluate powi at compile-time.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && !TREE_OVERFLOW (arg0))
	{
	  REAL_VALUE_TYPE x;
	  x = TREE_REAL_CST (arg0);
	  real_powi (&x, TYPE_MODE (type), &x, c);
	  return build_real (type, x);
	}

      /* Optimize pow(x,0) = 1.0.  */
      if (c == 0)
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1) = x.  */
      if (c == 1)
	return arg0;

      /* Optimize pow(x,-1) = 1.0/x.  */
      if (c == -1)
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);
    }

  return NULL_TREE;
}

/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}

/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
{
  tree fn, len, lenp1, call, type;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return NULL_TREE;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return NULL_TREE;

  lenp1 = size_binop_loc (loc, PLUS_EXPR,
			  fold_convert_loc (loc, size_type_node, len),
			  build_int_cst (size_type_node, 1));
  /* We use dest twice in building our expression.  Save it from
     multiple expansions.  */
  dest = builtin_save_expr (dest);
  call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);

  type = TREE_TYPE (TREE_TYPE (fndecl));
  dest = fold_build_pointer_plus_loc (loc, dest, len);
  dest = fold_convert_loc (loc, type, dest);
  dest = omit_one_operand_loc (loc, type, dest, call);
  return dest;
}
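
/* For illustration: when strlen (src) is a known constant N, the fold
   above turns

       stpcpy (dest, src)

   into, roughly,

       (memcpy (dest, src, N + 1), dest + N)

   i.e. a memcpy of the string plus its terminator, with the returned
   pointer to the terminating NUL computed directly.  */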

/* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
   arguments to the call, and TYPE is its return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, INTEGER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (arg2) != INTEGER_CST
	  || !tree_fits_uhwi_p (len))
	return NULL_TREE;

      p1 = c_getstr (arg1);
      if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (arg2, &c))
	    return NULL_TREE;

	  r = (const char *) memchr (p1, c, tree_to_uhwi (len));

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (arg1), 0);

	  tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}

/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node,
				 len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (tree_fits_uhwi_p (len) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_to_uhwi (len));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
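
/* For illustration: memcmp (p, q, 1) becomes, in user terms,

       *(const unsigned char *) p - *(const unsigned char *) q

   and a fully constant call such as memcmp ("ab", "ab", 2) folds to
   0 outright.  */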

/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}

/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node,
				 len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (tree_fits_uhwi_p (len) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_to_uhwi (len));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9065 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9066 ARG. Return NULL_TREE if no simplification can be made. */
9068 static tree
9069 fold_builtin_signbit (location_t loc, tree arg, tree type)
9071 if (!validate_arg (arg, REAL_TYPE))
9072 return NULL_TREE;
9074 /* If ARG is a compile-time constant, determine the result. */
9075 if (TREE_CODE (arg) == REAL_CST
9076 && !TREE_OVERFLOW (arg))
9078 REAL_VALUE_TYPE c;
9080 c = TREE_REAL_CST (arg);
9081 return (REAL_VALUE_NEGATIVE (c)
9082 ? build_one_cst (type)
9083 : build_zero_cst (type));
9086 /* If ARG is non-negative, the result is always zero. */
9087 if (tree_expr_nonnegative_p (arg))
9088 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9090 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9091 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9092 return fold_convert (type,
9093 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9094 build_real (TREE_TYPE (arg), dconst0)));
9096 return NULL_TREE;
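/* Why the HONOR_SIGNED_ZEROS guard above is needed -- a sketch assuming
   IEEE floating point (hypothetical helper, not part of this file):  */

static int
example_signbit_negzero (void)
{
  /* signbit (-0.0) is nonzero, but -0.0 < 0.0 is false, so the
     "arg < 0.0" replacement is only valid without signed zeros.  */
  return __builtin_signbit (-0.0) != 0 && !(-0.0 < 0.0);  /* evaluates to 1.  */
}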
9099 /* Fold function call to builtin copysign, copysignf or copysignl with
9100 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9101 be made. */
9103 static tree
9104 fold_builtin_copysign (location_t loc, tree fndecl,
9105 tree arg1, tree arg2, tree type)
9107 tree tem;
9109 if (!validate_arg (arg1, REAL_TYPE)
9110 || !validate_arg (arg2, REAL_TYPE))
9111 return NULL_TREE;
9113 /* copysign(X,X) is X. */
9114 if (operand_equal_p (arg1, arg2, 0))
9115 return fold_convert_loc (loc, type, arg1);
9117 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9118 if (TREE_CODE (arg1) == REAL_CST
9119 && TREE_CODE (arg2) == REAL_CST
9120 && !TREE_OVERFLOW (arg1)
9121 && !TREE_OVERFLOW (arg2))
9123 REAL_VALUE_TYPE c1, c2;
9125 c1 = TREE_REAL_CST (arg1);
9126 c2 = TREE_REAL_CST (arg2);
9127 /* c1.sign := c2.sign. */
9128 real_copysign (&c1, &c2);
9129 return build_real (type, c1);
9132 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9133 Remember to evaluate Y for side-effects. */
9134 if (tree_expr_nonnegative_p (arg2))
9135 return omit_one_operand_loc (loc, type,
9136 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9137 arg2);
9139 /* Strip sign changing operations for the first argument. */
9140 tem = fold_strip_sign_ops (arg1);
9141 if (tem)
9142 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9144 return NULL_TREE;
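/* A sketch of the copysign folds above (hypothetical helper, not part
   of this file):  */

static double
example_copysign_folds (double x)
{
  double a = __builtin_copysign (x, x);		/* copysign (X, X): folds to X.  */
  double b = __builtin_copysign (-3.0, 2.0);	/* constants: folds to 3.0.  */
  double c = __builtin_copysign (x, 2.0);	/* non-negative ARG2: folds to fabs (x).  */
  return a + b + c;
}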
9147 /* Fold a call to builtin isascii with argument ARG. */
9149 static tree
9150 fold_builtin_isascii (location_t loc, tree arg)
9152 if (!validate_arg (arg, INTEGER_TYPE))
9153 return NULL_TREE;
9154 else
9156 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9157 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9158 build_int_cst (integer_type_node,
9159 ~ (unsigned HOST_WIDE_INT) 0x7f));
9160 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9161 arg, integer_zero_node);
9165 /* Fold a call to builtin toascii with argument ARG. */
9167 static tree
9168 fold_builtin_toascii (location_t loc, tree arg)
9170 if (!validate_arg (arg, INTEGER_TYPE))
9171 return NULL_TREE;
9173 /* Transform toascii(c) -> (c & 0x7f). */
9174 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9175 build_int_cst (integer_type_node, 0x7f));
9178 /* Fold a call to builtin isdigit with argument ARG. */
9180 static tree
9181 fold_builtin_isdigit (location_t loc, tree arg)
9183 if (!validate_arg (arg, INTEGER_TYPE))
9184 return NULL_TREE;
9185 else
9187 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9188 /* According to the C standard, isdigit is unaffected by locale.
9189 However, it definitely is affected by the target character set. */
9190 unsigned HOST_WIDE_INT target_digit0
9191 = lang_hooks.to_target_charset ('0');
9193 if (target_digit0 == 0)
9194 return NULL_TREE;
9196 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9197 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9198 build_int_cst (unsigned_type_node, target_digit0));
9199 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9200 build_int_cst (unsigned_type_node, 9));
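/* The isdigit transformation above at the source level -- a sketch
   assuming the usual ASCII value of '0' (hypothetical helper, not part
   of this file):  */

static int
example_isdigit_expansion (int c)
{
  /* One unsigned subtract-and-compare replaces the library call;
     e.g. '5' gives 5 <= 9 (true), 'a' gives 0x31 <= 9 (false).  */
  return (unsigned) c - '0' <= 9;
}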
9204 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9206 static tree
9207 fold_builtin_fabs (location_t loc, tree arg, tree type)
9209 if (!validate_arg (arg, REAL_TYPE))
9210 return NULL_TREE;
9212 arg = fold_convert_loc (loc, type, arg);
9213 if (TREE_CODE (arg) == REAL_CST)
9214 return fold_abs_const (arg, type);
9215 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9218 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9220 static tree
9221 fold_builtin_abs (location_t loc, tree arg, tree type)
9223 if (!validate_arg (arg, INTEGER_TYPE))
9224 return NULL_TREE;
9226 arg = fold_convert_loc (loc, type, arg);
9227 if (TREE_CODE (arg) == INTEGER_CST)
9228 return fold_abs_const (arg, type);
9229 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9232 /* Fold a fma operation with arguments ARG[012]. */
9234 tree
9235 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9236 tree type, tree arg0, tree arg1, tree arg2)
9238 if (TREE_CODE (arg0) == REAL_CST
9239 && TREE_CODE (arg1) == REAL_CST
9240 && TREE_CODE (arg2) == REAL_CST)
9241 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9243 return NULL_TREE;
9246 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9248 static tree
9249 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9251 if (validate_arg (arg0, REAL_TYPE)
9252 && validate_arg (arg1, REAL_TYPE)
9253 && validate_arg (arg2, REAL_TYPE))
9255 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9256 if (tem)
9257 return tem;
9259 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9260 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9261 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9263 return NULL_TREE;
9266 /* Fold a call to builtin fmin or fmax. */
9268 static tree
9269 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9270 tree type, bool max)
9272 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9274 /* Calculate the result when the argument is a constant. */
9275 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9277 if (res)
9278 return res;
9280 /* If either argument is NaN, return the other one. Avoid the
9281 transformation if we get (and honor) a signalling NaN. Using
9282 omit_one_operand() ensures we create a non-lvalue. */
9283 if (TREE_CODE (arg0) == REAL_CST
9284 && real_isnan (&TREE_REAL_CST (arg0))
9285 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9286 || ! TREE_REAL_CST (arg0).signalling))
9287 return omit_one_operand_loc (loc, type, arg1, arg0);
9288 if (TREE_CODE (arg1) == REAL_CST
9289 && real_isnan (&TREE_REAL_CST (arg1))
9290 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9291 || ! TREE_REAL_CST (arg1).signalling))
9292 return omit_one_operand_loc (loc, type, arg0, arg1);
9294 /* Transform fmin/fmax(x,x) -> x. */
9295 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9296 return omit_one_operand_loc (loc, type, arg0, arg1);
9298 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9299 functions to return the numeric arg if the other one is NaN.
9300 These tree codes don't honor that, so only transform if
9301 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9302 handled, so we don't have to worry about it either. */
9303 if (flag_finite_math_only)
9304 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9305 fold_convert_loc (loc, type, arg0),
9306 fold_convert_loc (loc, type, arg1));
9308 return NULL_TREE;
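/* A sketch of the NaN rule above (hypothetical helper, assuming quiet
   NaNs): fmax must return the numeric operand when the other is NaN,
   which a bare MAX_EXPR does not guarantee -- hence the
   -ffinite-math-only condition on the MIN_EXPR/MAX_EXPR form.  */

static double
example_fmax_nan (double x)
{
  return __builtin_fmax (x, __builtin_nan (""));  /* folds to X.  */
}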
9311 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9313 static tree
9314 fold_builtin_carg (location_t loc, tree arg, tree type)
9316 if (validate_arg (arg, COMPLEX_TYPE)
9317 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9319 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9321 if (atan2_fn)
9323 tree new_arg = builtin_save_expr (arg);
9324 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9325 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9326 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9330 return NULL_TREE;
9333 /* Fold a call to builtin logb/ilogb. */
9335 static tree
9336 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9338 if (! validate_arg (arg, REAL_TYPE))
9339 return NULL_TREE;
9341 STRIP_NOPS (arg);
9343 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9345 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9347 switch (value->cl)
9349 case rvc_nan:
9350 case rvc_inf:
9351 /* If arg is Inf or NaN and we're logb, return it. */
9352 if (TREE_CODE (rettype) == REAL_TYPE)
9354 /* For logb(-Inf) we have to return +Inf. */
9355 if (real_isinf (value) && real_isneg (value))
9357 REAL_VALUE_TYPE tem;
9358 real_inf (&tem);
9359 return build_real (rettype, tem);
9361 return fold_convert_loc (loc, rettype, arg);
9363 /* Fall through... */
9364 case rvc_zero:
9365 /* Zero may set errno and/or raise an exception for logb; also,
9366 for ilogb we don't know FP_ILOGB0. */
9367 return NULL_TREE;
9368 case rvc_normal:
9369 /* For normal numbers, proceed iff radix == 2. In GCC,
9370 normalized significands are in the range [0.5, 1.0). We
9371 want the exponent as if they were [1.0, 2.0) so get the
9372 exponent and subtract 1. */
9373 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9374 return fold_convert_loc (loc, rettype,
9375 build_int_cst (integer_type_node,
9376 REAL_EXP (value)-1));
9377 break;
9381 return NULL_TREE;
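/* A worked constant for the rvc_normal case above (hypothetical helper,
   assuming a radix-2 format): 8.0 is 0.5 * 2**4 in GCC's normalized
   form, so REAL_EXP is 4 and logb (8.0) folds to 4 - 1 = 3.  */

static double
example_logb_const (void)
{
  return __builtin_logb (8.0);	/* folds to 3.0.  */
}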
9384 /* Fold a call to builtin significand, if radix == 2. */
9386 static tree
9387 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9389 if (! validate_arg (arg, REAL_TYPE))
9390 return NULL_TREE;
9392 STRIP_NOPS (arg);
9394 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9396 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9398 switch (value->cl)
9400 case rvc_zero:
9401 case rvc_nan:
9402 case rvc_inf:
9403 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9404 return fold_convert_loc (loc, rettype, arg);
9405 case rvc_normal:
9406 /* For normal numbers, proceed iff radix == 2. */
9407 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9409 REAL_VALUE_TYPE result = *value;
9410 /* In GCC, normalized significands are in the range [0.5,
9411 1.0). We want them to be [1.0, 2.0) so set the
9412 exponent to 1. */
9413 SET_REAL_EXP (&result, 1);
9414 return build_real (rettype, result);
9416 break;
9420 return NULL_TREE;
9423 /* Fold a call to builtin frexp; we can assume the base is 2. */
9425 static tree
9426 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9428 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9429 return NULL_TREE;
9431 STRIP_NOPS (arg0);
9433 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9434 return NULL_TREE;
9436 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9438 /* Proceed if a valid pointer type was passed in. */
9439 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9441 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9442 tree frac, exp;
9444 switch (value->cl)
9446 case rvc_zero:
9447 /* For +-0, return (*exp = 0, +-0). */
9448 exp = integer_zero_node;
9449 frac = arg0;
9450 break;
9451 case rvc_nan:
9452 case rvc_inf:
9453 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9454 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9455 case rvc_normal:
9457 /* Since the frexp function always expects base 2, and in
9458 GCC normalized significands are already in the range
9459 [0.5, 1.0), we have exactly what frexp wants. */
9460 REAL_VALUE_TYPE frac_rvt = *value;
9461 SET_REAL_EXP (&frac_rvt, 0);
9462 frac = build_real (rettype, frac_rvt);
9463 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9465 break;
9466 default:
9467 gcc_unreachable ();
9470 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9471 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9472 TREE_SIDE_EFFECTS (arg1) = 1;
9473 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9476 return NULL_TREE;
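/* A worked constant for the frexp fold above (hypothetical helper, not
   part of this file): 6.0 is 0.75 * 2**3, already in GCC's normalized
   form, so the call folds to the pair (*exp = 3, return 0.75).  */

static double
example_frexp_const (void)
{
  int e;
  double frac = __builtin_frexp (6.0, &e);
  return frac + e;	/* 0.75 + 3  */
}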
9479 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9480 then we can assume the base is two. If it's false, then we have to
9481 check the mode of the TYPE parameter in certain cases. */
9483 static tree
9484 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9485 tree type, bool ldexp)
9487 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9489 STRIP_NOPS (arg0);
9490 STRIP_NOPS (arg1);
9492 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9493 if (real_zerop (arg0) || integer_zerop (arg1)
9494 || (TREE_CODE (arg0) == REAL_CST
9495 && !real_isfinite (&TREE_REAL_CST (arg0))))
9496 return omit_one_operand_loc (loc, type, arg0, arg1);
9498 /* If both arguments are constant, then try to evaluate it. */
9499 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9500 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9501 && tree_fits_shwi_p (arg1))
9503 /* Bound the maximum adjustment to twice the range of the
9504 mode's valid exponents. Use abs to ensure the range is
9505 positive as a sanity check. */
9506 const long max_exp_adj = 2 *
9507 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9508 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9510 /* Get the user-requested adjustment. */
9511 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9513 /* The requested adjustment must be inside this range. This
9514 is a preliminary cap to avoid things like overflow, we
9515 may still fail to compute the result for other reasons. */
9516 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9518 REAL_VALUE_TYPE initial_result;
9520 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9522 /* Ensure we didn't overflow. */
9523 if (! real_isinf (&initial_result))
9525 const REAL_VALUE_TYPE trunc_result
9526 = real_value_truncate (TYPE_MODE (type), initial_result);
9528 /* Only proceed if the target mode can hold the
9529 resulting value. */
9530 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9531 return build_real (type, trunc_result);
9537 return NULL_TREE;
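/* A sketch of the ldexp folds above (hypothetical helper, not part of
   this file):  */

static double
example_ldexp_folds (double x)
{
  double a = __builtin_ldexp (x, 0);	/* ARG1 == 0: folds to X.  */
  double b = __builtin_ldexp (0.75, 2);	/* constants: 0.75 * 2**2 folds to 3.0.  */
  return a + b;
}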
9540 /* Fold a call to builtin modf. */
9542 static tree
9543 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9545 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9546 return NULL_TREE;
9548 STRIP_NOPS (arg0);
9550 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9551 return NULL_TREE;
9553 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9555 /* Proceed if a valid pointer type was passed in. */
9556 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9558 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9559 REAL_VALUE_TYPE trunc, frac;
9561 switch (value->cl)
9563 case rvc_nan:
9564 case rvc_zero:
9565 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9566 trunc = frac = *value;
9567 break;
9568 case rvc_inf:
9569 /* For +-Inf, return (*arg1 = arg0, +-0). */
9570 frac = dconst0;
9571 frac.sign = value->sign;
9572 trunc = *value;
9573 break;
9574 case rvc_normal:
9575 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9576 real_trunc (&trunc, VOIDmode, value);
9577 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9578 /* If the original number was negative and already
9579 integral, then the fractional part is -0.0. */
9580 if (value->sign && frac.cl == rvc_zero)
9581 frac.sign = value->sign;
9582 break;
9585 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9586 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9587 build_real (rettype, trunc));
9588 TREE_SIDE_EFFECTS (arg1) = 1;
9589 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9590 build_real (rettype, frac));
9593 return NULL_TREE;
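/* A worked constant for the modf fold above (hypothetical helper, not
   part of this file): for 2.5 the rvc_normal case gives trunc = 2.0
   and frac = 0.5, so the call folds to the pair (*iptr = 2.0, return
   0.5).  A negative integral input such as -2.0 yields a -0.0
   fractional part, as noted in the code.  */

static double
example_modf_const (void)
{
  double iptr;
  double frac = __builtin_modf (2.5, &iptr);
  return frac + iptr;	/* 0.5 + 2.0  */
}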
9596 /* Given a location LOC, an interclass builtin function decl FNDECL
9597 and its single argument ARG, return a folded expression computing
9598 the same, or NULL_TREE if we either couldn't or didn't want to fold
9599 (the latter happens if there's an RTL instruction available). */
9601 static tree
9602 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9604 machine_mode mode;
9606 if (!validate_arg (arg, REAL_TYPE))
9607 return NULL_TREE;
9609 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9610 return NULL_TREE;
9612 mode = TYPE_MODE (TREE_TYPE (arg));
9614 /* If there is no optab, try generic code. */
9615 switch (DECL_FUNCTION_CODE (fndecl))
9617 tree result;
9619 CASE_FLT_FN (BUILT_IN_ISINF):
9621 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9622 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9623 tree const type = TREE_TYPE (arg);
9624 REAL_VALUE_TYPE r;
9625 char buf[128];
9627 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9628 real_from_string (&r, buf);
9629 result = build_call_expr (isgr_fn, 2,
9630 fold_build1_loc (loc, ABS_EXPR, type, arg),
9631 build_real (type, r));
9632 return result;
9634 CASE_FLT_FN (BUILT_IN_FINITE):
9635 case BUILT_IN_ISFINITE:
9637 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9638 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9639 tree const type = TREE_TYPE (arg);
9640 REAL_VALUE_TYPE r;
9641 char buf[128];
9643 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9644 real_from_string (&r, buf);
9645 result = build_call_expr (isle_fn, 2,
9646 fold_build1_loc (loc, ABS_EXPR, type, arg),
9647 build_real (type, r));
9648 /*result = fold_build2_loc (loc, UNGT_EXPR,
9649 TREE_TYPE (TREE_TYPE (fndecl)),
9650 fold_build1_loc (loc, ABS_EXPR, type, arg),
9651 build_real (type, r));
9652 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9653 TREE_TYPE (TREE_TYPE (fndecl)),
9654 result);*/
9655 return result;
9657 case BUILT_IN_ISNORMAL:
9659 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9660 islessequal(fabs(x),DBL_MAX). */
9661 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9662 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9663 tree const type = TREE_TYPE (arg);
9664 REAL_VALUE_TYPE rmax, rmin;
9665 char buf[128];
9667 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9668 real_from_string (&rmax, buf);
9669 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9670 real_from_string (&rmin, buf);
9671 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9672 result = build_call_expr (isle_fn, 2, arg,
9673 build_real (type, rmax));
9674 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9675 build_call_expr (isge_fn, 2, arg,
9676 build_real (type, rmin)));
9677 return result;
9679 default:
9680 break;
9683 return NULL_TREE;
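/* The generic isinf expansion above in source form -- a sketch assuming
   a binary64 "double", whose largest finite value is __DBL_MAX__
   (hypothetical helper, not part of this file): only an infinity
   compares greater than the format's maximum, and a NaN makes
   isgreater return false.  */

static int
example_isinf_expansion (double x)
{
  return __builtin_isgreater (__builtin_fabs (x), __DBL_MAX__);
}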
9686 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9687 ARG is the argument for the call. */
9689 static tree
9690 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9692 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9693 REAL_VALUE_TYPE r;
9695 if (!validate_arg (arg, REAL_TYPE))
9696 return NULL_TREE;
9698 switch (builtin_index)
9700 case BUILT_IN_ISINF:
9701 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9702 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9704 if (TREE_CODE (arg) == REAL_CST)
9706 r = TREE_REAL_CST (arg);
9707 if (real_isinf (&r))
9708 return real_compare (GT_EXPR, &r, &dconst0)
9709 ? integer_one_node : integer_minus_one_node;
9710 else
9711 return integer_zero_node;
9714 return NULL_TREE;
9716 case BUILT_IN_ISINF_SIGN:
9718 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9719 /* In a boolean context, GCC will fold the inner COND_EXPR to
9720 1. So e.g. "if (isinf_sign(x))" would be folded to just
9721 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9722 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9723 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9724 tree tmp = NULL_TREE;
9726 arg = builtin_save_expr (arg);
9728 if (signbit_fn && isinf_fn)
9730 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9731 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9733 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9734 signbit_call, integer_zero_node);
9735 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9736 isinf_call, integer_zero_node);
9738 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9739 integer_minus_one_node, integer_one_node);
9740 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9741 isinf_call, tmp,
9742 integer_zero_node);
9745 return tmp;
9748 case BUILT_IN_ISFINITE:
9749 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9750 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9751 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9753 if (TREE_CODE (arg) == REAL_CST)
9755 r = TREE_REAL_CST (arg);
9756 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9759 return NULL_TREE;
9761 case BUILT_IN_ISNAN:
9762 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9763 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9765 if (TREE_CODE (arg) == REAL_CST)
9767 r = TREE_REAL_CST (arg);
9768 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9771 arg = builtin_save_expr (arg);
9772 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9774 default:
9775 gcc_unreachable ();
9779 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9780 This builtin will generate code to return the appropriate floating
9781 point classification depending on the value of the floating point
9782 number passed in. The possible return values must be supplied as
9783 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9784 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9785 one floating point argument which is "type generic". */
9787 static tree
9788 fold_builtin_fpclassify (location_t loc, tree exp)
9790 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9791 arg, type, res, tmp;
9792 machine_mode mode;
9793 REAL_VALUE_TYPE r;
9794 char buf[128];
9796 /* Verify the required arguments in the original call. */
9797 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9798 INTEGER_TYPE, INTEGER_TYPE,
9799 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9800 return NULL_TREE;
9802 fp_nan = CALL_EXPR_ARG (exp, 0);
9803 fp_infinite = CALL_EXPR_ARG (exp, 1);
9804 fp_normal = CALL_EXPR_ARG (exp, 2);
9805 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9806 fp_zero = CALL_EXPR_ARG (exp, 4);
9807 arg = CALL_EXPR_ARG (exp, 5);
9808 type = TREE_TYPE (arg);
9809 mode = TYPE_MODE (type);
9810 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9812 /* fpclassify(x) ->
9813 isnan(x) ? FP_NAN :
9814 (fabs(x) == Inf ? FP_INFINITE :
9815 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9816 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9818 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9819 build_real (type, dconst0));
9820 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9821 tmp, fp_zero, fp_subnormal);
9823 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9824 real_from_string (&r, buf);
9825 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9826 arg, build_real (type, r));
9827 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9829 if (HONOR_INFINITIES (mode))
9831 real_inf (&r);
9832 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9833 build_real (type, r));
9834 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9835 fp_infinite, res);
9838 if (HONOR_NANS (mode))
9840 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9841 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9844 return res;
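/* The decision chain built above, written as source-level C -- a sketch
   assuming glibc's values for the five classification macros (FP_NAN =
   0, FP_INFINITE = 1, FP_ZERO = 2, FP_SUBNORMAL = 3, FP_NORMAL = 4)
   and a binary64 "double" (hypothetical helper, not part of this
   file):  */

static int
example_fpclassify_expansion (double x)
{
  double ax = __builtin_fabs (x);
  return __builtin_isnan (x) ? 0		/* FP_NAN */
	 : ax == __builtin_inf () ? 1		/* FP_INFINITE */
	 : ax >= __DBL_MIN__ ? 4		/* FP_NORMAL */
	 : ax == 0.0 ? 2			/* FP_ZERO */
	 : 3;					/* FP_SUBNORMAL */
}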
9847 /* Fold a call to an unordered comparison function such as
9848 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9849 being called and ARG0 and ARG1 are the arguments for the call.
9850 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9851 the opposite of the desired result. UNORDERED_CODE is used
9852 for modes that can hold NaNs and ORDERED_CODE is used for
9853 the rest. */
9855 static tree
9856 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9857 enum tree_code unordered_code,
9858 enum tree_code ordered_code)
9860 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9861 enum tree_code code;
9862 tree type0, type1;
9863 enum tree_code code0, code1;
9864 tree cmp_type = NULL_TREE;
9866 type0 = TREE_TYPE (arg0);
9867 type1 = TREE_TYPE (arg1);
9869 code0 = TREE_CODE (type0);
9870 code1 = TREE_CODE (type1);
9872 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9873 /* Choose the wider of two real types. */
9874 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9875 ? type0 : type1;
9876 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9877 cmp_type = type0;
9878 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9879 cmp_type = type1;
9881 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9882 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9884 if (unordered_code == UNORDERED_EXPR)
9886 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9887 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9888 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9891 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9892 : ordered_code;
9893 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9894 fold_build2_loc (loc, code, type, arg0, arg1));
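/* What the UNLE-based fold of isgreater computes, as ordinary C -- a
   sketch for a NaN-honoring mode (hypothetical helper, not part of
   this file): "greater" means the operands are ordered and x <= y does
   not hold; the short-circuit keeps the raw comparison away from
   NaNs.  */

static int
example_isgreater_expansion (double x, double y)
{
  return !__builtin_isunordered (x, y) && !(x <= y);
}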
9897 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9898 arithmetic if it can never overflow, or into internal functions that
9899 return both the result of the arithmetic and an overflow flag in
9900 a complex integer result, or some other check for overflow. */
9902 static tree
9903 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9904 tree arg0, tree arg1, tree arg2)
9906 enum internal_fn ifn = IFN_LAST;
9907 tree type = TREE_TYPE (TREE_TYPE (arg2));
9908 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9909 switch (fcode)
9911 case BUILT_IN_ADD_OVERFLOW:
9912 case BUILT_IN_SADD_OVERFLOW:
9913 case BUILT_IN_SADDL_OVERFLOW:
9914 case BUILT_IN_SADDLL_OVERFLOW:
9915 case BUILT_IN_UADD_OVERFLOW:
9916 case BUILT_IN_UADDL_OVERFLOW:
9917 case BUILT_IN_UADDLL_OVERFLOW:
9918 ifn = IFN_ADD_OVERFLOW;
9919 break;
9920 case BUILT_IN_SUB_OVERFLOW:
9921 case BUILT_IN_SSUB_OVERFLOW:
9922 case BUILT_IN_SSUBL_OVERFLOW:
9923 case BUILT_IN_SSUBLL_OVERFLOW:
9924 case BUILT_IN_USUB_OVERFLOW:
9925 case BUILT_IN_USUBL_OVERFLOW:
9926 case BUILT_IN_USUBLL_OVERFLOW:
9927 ifn = IFN_SUB_OVERFLOW;
9928 break;
9929 case BUILT_IN_MUL_OVERFLOW:
9930 case BUILT_IN_SMUL_OVERFLOW:
9931 case BUILT_IN_SMULL_OVERFLOW:
9932 case BUILT_IN_SMULLL_OVERFLOW:
9933 case BUILT_IN_UMUL_OVERFLOW:
9934 case BUILT_IN_UMULL_OVERFLOW:
9935 case BUILT_IN_UMULLL_OVERFLOW:
9936 ifn = IFN_MUL_OVERFLOW;
9937 break;
9938 default:
9939 gcc_unreachable ();
9941 tree ctype = build_complex_type (type);
9942 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9943 2, arg0, arg1);
9944 tree tgt = save_expr (call);
9945 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9946 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9947 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9948 tree store
9949 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9950 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
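/* The caller's view of the fold above (hypothetical helper, not part of
   this file): the builtin becomes an IFN_ADD_OVERFLOW call whose
   complex result is split with REALPART_EXPR (the wrapped sum, stored
   through RES) and IMAGPART_EXPR (the overflow flag, returned as
   bool).  */

static int
example_add_overflow (int a, int b, int *res)
{
  return __builtin_add_overflow (a, b, res);
}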
9953 /* Fold a call to built-in function FNDECL with 0 arguments.
9954 IGNORE is true if the result of the function call is ignored. This
9955 function returns NULL_TREE if no simplification was possible. */
9957 static tree
9958 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9960 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9961 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9962 switch (fcode)
9964 CASE_FLT_FN (BUILT_IN_INF):
9965 case BUILT_IN_INFD32:
9966 case BUILT_IN_INFD64:
9967 case BUILT_IN_INFD128:
9968 return fold_builtin_inf (loc, type, true);
9970 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9971 return fold_builtin_inf (loc, type, false);
9973 case BUILT_IN_CLASSIFY_TYPE:
9974 return fold_builtin_classify_type (NULL_TREE);
9976 default:
9977 break;
9979 return NULL_TREE;
9982 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9983 IGNORE is true if the result of the function call is ignored. This
9984 function returns NULL_TREE if no simplification was possible. */
9986 static tree
9987 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9989 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9990 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9991 switch (fcode)
9993 case BUILT_IN_CONSTANT_P:
9995 tree val = fold_builtin_constant_p (arg0);
9997 /* Gimplification will pull the CALL_EXPR for the builtin out of
9998 an if condition. When not optimizing, we'll not CSE it back.
9999 To avoid link error types of regressions, return false now. */
10000 if (!val && !optimize)
10001 val = integer_zero_node;
10003 return val;
10006 case BUILT_IN_CLASSIFY_TYPE:
10007 return fold_builtin_classify_type (arg0);
10009 case BUILT_IN_STRLEN:
10010 return fold_builtin_strlen (loc, type, arg0);
10012 CASE_FLT_FN (BUILT_IN_FABS):
10013 case BUILT_IN_FABSD32:
10014 case BUILT_IN_FABSD64:
10015 case BUILT_IN_FABSD128:
10016 return fold_builtin_fabs (loc, arg0, type);
10018 case BUILT_IN_ABS:
10019 case BUILT_IN_LABS:
10020 case BUILT_IN_LLABS:
10021 case BUILT_IN_IMAXABS:
10022 return fold_builtin_abs (loc, arg0, type);
10024 CASE_FLT_FN (BUILT_IN_CONJ):
10025 if (validate_arg (arg0, COMPLEX_TYPE)
10026 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10027 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10028 break;
10030 CASE_FLT_FN (BUILT_IN_CREAL):
10031 if (validate_arg (arg0, COMPLEX_TYPE)
10032 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10033 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10034 break;
10036 CASE_FLT_FN (BUILT_IN_CIMAG):
10037 if (validate_arg (arg0, COMPLEX_TYPE)
10038 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10039 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10040 break;
10042 CASE_FLT_FN (BUILT_IN_CCOS):
10043 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10045 CASE_FLT_FN (BUILT_IN_CCOSH):
10046 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10048 CASE_FLT_FN (BUILT_IN_CPROJ):
10049 return fold_builtin_cproj (loc, arg0, type);
10051 CASE_FLT_FN (BUILT_IN_CSIN):
10052 if (validate_arg (arg0, COMPLEX_TYPE)
10053 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10054 return do_mpc_arg1 (arg0, type, mpc_sin);
10055 break;
10057 CASE_FLT_FN (BUILT_IN_CSINH):
10058 if (validate_arg (arg0, COMPLEX_TYPE)
10059 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10060 return do_mpc_arg1 (arg0, type, mpc_sinh);
10061 break;
10063 CASE_FLT_FN (BUILT_IN_CTAN):
10064 if (validate_arg (arg0, COMPLEX_TYPE)
10065 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10066 return do_mpc_arg1 (arg0, type, mpc_tan);
10067 break;
10069 CASE_FLT_FN (BUILT_IN_CTANH):
10070 if (validate_arg (arg0, COMPLEX_TYPE)
10071 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10072 return do_mpc_arg1 (arg0, type, mpc_tanh);
10073 break;
10075 CASE_FLT_FN (BUILT_IN_CLOG):
10076 if (validate_arg (arg0, COMPLEX_TYPE)
10077 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10078 return do_mpc_arg1 (arg0, type, mpc_log);
10079 break;
10081 CASE_FLT_FN (BUILT_IN_CSQRT):
10082 if (validate_arg (arg0, COMPLEX_TYPE)
10083 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10084 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10085 break;
10087 CASE_FLT_FN (BUILT_IN_CASIN):
10088 if (validate_arg (arg0, COMPLEX_TYPE)
10089 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10090 return do_mpc_arg1 (arg0, type, mpc_asin);
10091 break;
10093 CASE_FLT_FN (BUILT_IN_CACOS):
10094 if (validate_arg (arg0, COMPLEX_TYPE)
10095 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10096 return do_mpc_arg1 (arg0, type, mpc_acos);
10097 break;
10099 CASE_FLT_FN (BUILT_IN_CATAN):
10100 if (validate_arg (arg0, COMPLEX_TYPE)
10101 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10102 return do_mpc_arg1 (arg0, type, mpc_atan);
10103 break;
10105 CASE_FLT_FN (BUILT_IN_CASINH):
10106 if (validate_arg (arg0, COMPLEX_TYPE)
10107 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10108 return do_mpc_arg1 (arg0, type, mpc_asinh);
10109 break;
10111 CASE_FLT_FN (BUILT_IN_CACOSH):
10112 if (validate_arg (arg0, COMPLEX_TYPE)
10113 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10114 return do_mpc_arg1 (arg0, type, mpc_acosh);
10115 break;
10117 CASE_FLT_FN (BUILT_IN_CATANH):
10118 if (validate_arg (arg0, COMPLEX_TYPE)
10119 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10120 return do_mpc_arg1 (arg0, type, mpc_atanh);
10121 break;
10123 CASE_FLT_FN (BUILT_IN_CABS):
10124 return fold_builtin_cabs (loc, arg0, type, fndecl);
10126 CASE_FLT_FN (BUILT_IN_CARG):
10127 return fold_builtin_carg (loc, arg0, type);
10129 CASE_FLT_FN (BUILT_IN_SQRT):
10130 return fold_builtin_sqrt (loc, arg0, type);
10132 CASE_FLT_FN (BUILT_IN_CBRT):
10133 return fold_builtin_cbrt (loc, arg0, type);
10135 CASE_FLT_FN (BUILT_IN_ASIN):
10136 if (validate_arg (arg0, REAL_TYPE))
10137 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10138 &dconstm1, &dconst1, true);
10139 break;
10141 CASE_FLT_FN (BUILT_IN_ACOS):
10142 if (validate_arg (arg0, REAL_TYPE))
10143 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10144 &dconstm1, &dconst1, true);
10145 break;
10147 CASE_FLT_FN (BUILT_IN_ATAN):
10148 if (validate_arg (arg0, REAL_TYPE))
10149 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10150 break;
10152 CASE_FLT_FN (BUILT_IN_ASINH):
10153 if (validate_arg (arg0, REAL_TYPE))
10154 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10155 break;
10157 CASE_FLT_FN (BUILT_IN_ACOSH):
10158 if (validate_arg (arg0, REAL_TYPE))
10159 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10160 &dconst1, NULL, true);
10161 break;
10163 CASE_FLT_FN (BUILT_IN_ATANH):
10164 if (validate_arg (arg0, REAL_TYPE))
10165 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10166 &dconstm1, &dconst1, false);
10167 break;
10169 CASE_FLT_FN (BUILT_IN_SIN):
10170 if (validate_arg (arg0, REAL_TYPE))
10171 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10172 break;
10174 CASE_FLT_FN (BUILT_IN_COS):
10175 return fold_builtin_cos (loc, arg0, type, fndecl);
10177 CASE_FLT_FN (BUILT_IN_TAN):
10178 return fold_builtin_tan (arg0, type);
10180 CASE_FLT_FN (BUILT_IN_CEXP):
10181 return fold_builtin_cexp (loc, arg0, type);
10183 CASE_FLT_FN (BUILT_IN_CEXPI):
10184 if (validate_arg (arg0, REAL_TYPE))
10185 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10186 break;
10188 CASE_FLT_FN (BUILT_IN_SINH):
10189 if (validate_arg (arg0, REAL_TYPE))
10190 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10191 break;
10193 CASE_FLT_FN (BUILT_IN_COSH):
10194 return fold_builtin_cosh (loc, arg0, type, fndecl);
10196 CASE_FLT_FN (BUILT_IN_TANH):
10197 if (validate_arg (arg0, REAL_TYPE))
10198 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10199 break;
10201 CASE_FLT_FN (BUILT_IN_ERF):
10202 if (validate_arg (arg0, REAL_TYPE))
10203 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10204 break;
10206 CASE_FLT_FN (BUILT_IN_ERFC):
10207 if (validate_arg (arg0, REAL_TYPE))
10208 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10209 break;
10211 CASE_FLT_FN (BUILT_IN_TGAMMA):
10212 if (validate_arg (arg0, REAL_TYPE))
10213 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10214 break;
10216 CASE_FLT_FN (BUILT_IN_EXP):
10217 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10219 CASE_FLT_FN (BUILT_IN_EXP2):
10220 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10222 CASE_FLT_FN (BUILT_IN_EXP10):
10223 CASE_FLT_FN (BUILT_IN_POW10):
10224 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10226 CASE_FLT_FN (BUILT_IN_EXPM1):
10227 if (validate_arg (arg0, REAL_TYPE))
10228 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10229 break;
10231 CASE_FLT_FN (BUILT_IN_LOG):
10232 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10234 CASE_FLT_FN (BUILT_IN_LOG2):
10235 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10237 CASE_FLT_FN (BUILT_IN_LOG10):
10238 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10240 CASE_FLT_FN (BUILT_IN_LOG1P):
10241 if (validate_arg (arg0, REAL_TYPE))
10242 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10243 &dconstm1, NULL, false);
10244 break;
10246 CASE_FLT_FN (BUILT_IN_J0):
10247 if (validate_arg (arg0, REAL_TYPE))
10248 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10249 NULL, NULL, 0);
10250 break;
10252 CASE_FLT_FN (BUILT_IN_J1):
10253 if (validate_arg (arg0, REAL_TYPE))
10254 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10255 NULL, NULL, 0);
10256 break;
10258 CASE_FLT_FN (BUILT_IN_Y0):
10259 if (validate_arg (arg0, REAL_TYPE))
10260 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10261 &dconst0, NULL, false);
10262 break;
10264 CASE_FLT_FN (BUILT_IN_Y1):
10265 if (validate_arg (arg0, REAL_TYPE))
10266 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10267 &dconst0, NULL, false);
10268 break;
10270 CASE_FLT_FN (BUILT_IN_NAN):
10271 case BUILT_IN_NAND32:
10272 case BUILT_IN_NAND64:
10273 case BUILT_IN_NAND128:
10274 return fold_builtin_nan (arg0, type, true);
10276 CASE_FLT_FN (BUILT_IN_NANS):
10277 return fold_builtin_nan (arg0, type, false);
10279 CASE_FLT_FN (BUILT_IN_FLOOR):
10280 return fold_builtin_floor (loc, fndecl, arg0);
10282 CASE_FLT_FN (BUILT_IN_CEIL):
10283 return fold_builtin_ceil (loc, fndecl, arg0);
10285 CASE_FLT_FN (BUILT_IN_TRUNC):
10286 return fold_builtin_trunc (loc, fndecl, arg0);
10288 CASE_FLT_FN (BUILT_IN_ROUND):
10289 return fold_builtin_round (loc, fndecl, arg0);
10291 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10292 CASE_FLT_FN (BUILT_IN_RINT):
10293 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10295 CASE_FLT_FN (BUILT_IN_ICEIL):
10296 CASE_FLT_FN (BUILT_IN_LCEIL):
10297 CASE_FLT_FN (BUILT_IN_LLCEIL):
10298 CASE_FLT_FN (BUILT_IN_LFLOOR):
10299 CASE_FLT_FN (BUILT_IN_IFLOOR):
10300 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10301 CASE_FLT_FN (BUILT_IN_IROUND):
10302 CASE_FLT_FN (BUILT_IN_LROUND):
10303 CASE_FLT_FN (BUILT_IN_LLROUND):
10304 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10306 CASE_FLT_FN (BUILT_IN_IRINT):
10307 CASE_FLT_FN (BUILT_IN_LRINT):
10308 CASE_FLT_FN (BUILT_IN_LLRINT):
10309 return fold_fixed_mathfn (loc, fndecl, arg0);
10311 case BUILT_IN_BSWAP16:
10312 case BUILT_IN_BSWAP32:
10313 case BUILT_IN_BSWAP64:
10314 return fold_builtin_bswap (fndecl, arg0);
10316 CASE_INT_FN (BUILT_IN_FFS):
10317 CASE_INT_FN (BUILT_IN_CLZ):
10318 CASE_INT_FN (BUILT_IN_CTZ):
10319 CASE_INT_FN (BUILT_IN_CLRSB):
10320 CASE_INT_FN (BUILT_IN_POPCOUNT):
10321 CASE_INT_FN (BUILT_IN_PARITY):
10322 return fold_builtin_bitop (fndecl, arg0);
10324 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10325 return fold_builtin_signbit (loc, arg0, type);
10327 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10328 return fold_builtin_significand (loc, arg0, type);
10330 CASE_FLT_FN (BUILT_IN_ILOGB):
10331 CASE_FLT_FN (BUILT_IN_LOGB):
10332 return fold_builtin_logb (loc, arg0, type);
10334 case BUILT_IN_ISASCII:
10335 return fold_builtin_isascii (loc, arg0);
10337 case BUILT_IN_TOASCII:
10338 return fold_builtin_toascii (loc, arg0);
10340 case BUILT_IN_ISDIGIT:
10341 return fold_builtin_isdigit (loc, arg0);
10343 CASE_FLT_FN (BUILT_IN_FINITE):
10344 case BUILT_IN_FINITED32:
10345 case BUILT_IN_FINITED64:
10346 case BUILT_IN_FINITED128:
10347 case BUILT_IN_ISFINITE:
10349 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10350 if (ret)
10351 return ret;
10352 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10355 CASE_FLT_FN (BUILT_IN_ISINF):
10356 case BUILT_IN_ISINFD32:
10357 case BUILT_IN_ISINFD64:
10358 case BUILT_IN_ISINFD128:
10360 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10361 if (ret)
10362 return ret;
10363 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10366 case BUILT_IN_ISNORMAL:
10367 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10369 case BUILT_IN_ISINF_SIGN:
10370 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10372 CASE_FLT_FN (BUILT_IN_ISNAN):
10373 case BUILT_IN_ISNAND32:
10374 case BUILT_IN_ISNAND64:
10375 case BUILT_IN_ISNAND128:
10376 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10378 case BUILT_IN_PRINTF:
10379 case BUILT_IN_PRINTF_UNLOCKED:
10380 case BUILT_IN_VPRINTF:
10381 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10383 case BUILT_IN_FREE:
10384 if (integer_zerop (arg0))
10385 return build_empty_stmt (loc);
10386 break;
10388 default:
10389 break;
10392 return NULL_TREE;
10396 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10397 IGNORE is true if the result of the function call is ignored. This
10398 function returns NULL_TREE if no simplification was possible. */
10400 static tree
10401 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10403 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10404 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10406 switch (fcode)
10408 CASE_FLT_FN (BUILT_IN_JN):
10409 if (validate_arg (arg0, INTEGER_TYPE)
10410 && validate_arg (arg1, REAL_TYPE))
10411 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10412 break;
10414 CASE_FLT_FN (BUILT_IN_YN):
10415 if (validate_arg (arg0, INTEGER_TYPE)
10416 && validate_arg (arg1, REAL_TYPE))
10417 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10418 &dconst0, false);
10419 break;
10421 CASE_FLT_FN (BUILT_IN_DREM):
10422 CASE_FLT_FN (BUILT_IN_REMAINDER):
10423 if (validate_arg (arg0, REAL_TYPE)
10424 && validate_arg (arg1, REAL_TYPE))
10425 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10426 break;
10428 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10429 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10430 if (validate_arg (arg0, REAL_TYPE)
10431 && validate_arg (arg1, POINTER_TYPE))
10432 return do_mpfr_lgamma_r (arg0, arg1, type);
10433 break;
10435 CASE_FLT_FN (BUILT_IN_ATAN2):
10436 if (validate_arg (arg0, REAL_TYPE)
10437 && validate_arg (arg1, REAL_TYPE))
10438 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10439 break;
10441 CASE_FLT_FN (BUILT_IN_FDIM):
10442 if (validate_arg (arg0, REAL_TYPE)
10443 && validate_arg (arg1, REAL_TYPE))
10444 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10445 break;
10447 CASE_FLT_FN (BUILT_IN_HYPOT):
10448 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10450 CASE_FLT_FN (BUILT_IN_CPOW):
10451 if (validate_arg (arg0, COMPLEX_TYPE)
10452 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10453 && validate_arg (arg1, COMPLEX_TYPE)
10454 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10455 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10456 break;
10458 CASE_FLT_FN (BUILT_IN_LDEXP):
10459 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10460 CASE_FLT_FN (BUILT_IN_SCALBN):
10461 CASE_FLT_FN (BUILT_IN_SCALBLN):
10462 return fold_builtin_load_exponent (loc, arg0, arg1,
10463 type, /*ldexp=*/false);
10465 CASE_FLT_FN (BUILT_IN_FREXP):
10466 return fold_builtin_frexp (loc, arg0, arg1, type);
10468 CASE_FLT_FN (BUILT_IN_MODF):
10469 return fold_builtin_modf (loc, arg0, arg1, type);
10471 case BUILT_IN_STRSTR:
10472 return fold_builtin_strstr (loc, arg0, arg1, type);
10474 case BUILT_IN_STRSPN:
10475 return fold_builtin_strspn (loc, arg0, arg1);
10477 case BUILT_IN_STRCSPN:
10478 return fold_builtin_strcspn (loc, arg0, arg1);
10480 case BUILT_IN_STRCHR:
10481 case BUILT_IN_INDEX:
10482 return fold_builtin_strchr (loc, arg0, arg1, type);
10484 case BUILT_IN_STRRCHR:
10485 case BUILT_IN_RINDEX:
10486 return fold_builtin_strrchr (loc, arg0, arg1, type);
10488 case BUILT_IN_STPCPY:
10489 if (ignore)
10491 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10492 if (!fn)
10493 break;
10495 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10497 else
10498 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10499 break;
10501 case BUILT_IN_STRCMP:
10502 return fold_builtin_strcmp (loc, arg0, arg1);
10504 case BUILT_IN_STRPBRK:
10505 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10507 case BUILT_IN_EXPECT:
10508 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10510 CASE_FLT_FN (BUILT_IN_POW):
10511 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10513 CASE_FLT_FN (BUILT_IN_POWI):
10514 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10516 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10517 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10519 CASE_FLT_FN (BUILT_IN_FMIN):
10520 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10522 CASE_FLT_FN (BUILT_IN_FMAX):
10523 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10525 case BUILT_IN_ISGREATER:
10526 return fold_builtin_unordered_cmp (loc, fndecl,
10527 arg0, arg1, UNLE_EXPR, LE_EXPR);
10528 case BUILT_IN_ISGREATEREQUAL:
10529 return fold_builtin_unordered_cmp (loc, fndecl,
10530 arg0, arg1, UNLT_EXPR, LT_EXPR);
10531 case BUILT_IN_ISLESS:
10532 return fold_builtin_unordered_cmp (loc, fndecl,
10533 arg0, arg1, UNGE_EXPR, GE_EXPR);
10534 case BUILT_IN_ISLESSEQUAL:
10535 return fold_builtin_unordered_cmp (loc, fndecl,
10536 arg0, arg1, UNGT_EXPR, GT_EXPR);
10537 case BUILT_IN_ISLESSGREATER:
10538 return fold_builtin_unordered_cmp (loc, fndecl,
10539 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10540 case BUILT_IN_ISUNORDERED:
10541 return fold_builtin_unordered_cmp (loc, fndecl,
10542 arg0, arg1, UNORDERED_EXPR,
10543 NOP_EXPR);
10545 /* We do the folding for va_start in the expander. */
10546 case BUILT_IN_VA_START:
10547 break;
10549 case BUILT_IN_OBJECT_SIZE:
10550 return fold_builtin_object_size (arg0, arg1);
10552 case BUILT_IN_PRINTF:
10553 case BUILT_IN_PRINTF_UNLOCKED:
10554 case BUILT_IN_VPRINTF:
10555 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10557 case BUILT_IN_PRINTF_CHK:
10558 case BUILT_IN_VPRINTF_CHK:
10559 if (!validate_arg (arg0, INTEGER_TYPE)
10560 || TREE_SIDE_EFFECTS (arg0))
10561 return NULL_TREE;
10562 else
10563 return fold_builtin_printf (loc, fndecl,
10564 arg1, NULL_TREE, ignore, fcode);
10565 break;
10567 case BUILT_IN_FPRINTF:
10568 case BUILT_IN_FPRINTF_UNLOCKED:
10569 case BUILT_IN_VFPRINTF:
10570 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10571 ignore, fcode);
10573 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10574 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10576 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10577 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10579 default:
10580 break;
10582 return NULL_TREE;
10585 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10586 and ARG2. IGNORE is true if the result of the function call is ignored.
10587 This function returns NULL_TREE if no simplification was possible. */
10589 static tree
10590 fold_builtin_3 (location_t loc, tree fndecl,
10591 tree arg0, tree arg1, tree arg2, bool ignore)
10593 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10594 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10595 switch (fcode)
10598 CASE_FLT_FN (BUILT_IN_SINCOS):
10599 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10601 CASE_FLT_FN (BUILT_IN_FMA):
10602 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10603 break;
10605 CASE_FLT_FN (BUILT_IN_REMQUO):
10606 if (validate_arg (arg0, REAL_TYPE)
10607 && validate_arg (arg1, REAL_TYPE)
10608 && validate_arg (arg2, POINTER_TYPE))
10609 return do_mpfr_remquo (arg0, arg1, arg2);
10610 break;
10612 case BUILT_IN_STRNCAT:
10613 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10615 case BUILT_IN_STRNCMP:
10616 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10618 case BUILT_IN_MEMCHR:
10619 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10621 case BUILT_IN_BCMP:
10622 case BUILT_IN_MEMCMP:
10623 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10625 case BUILT_IN_PRINTF_CHK:
10626 case BUILT_IN_VPRINTF_CHK:
10627 if (!validate_arg (arg0, INTEGER_TYPE)
10628 || TREE_SIDE_EFFECTS (arg0))
10629 return NULL_TREE;
10630 else
10631 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10632 break;
10634 case BUILT_IN_FPRINTF:
10635 case BUILT_IN_FPRINTF_UNLOCKED:
10636 case BUILT_IN_VFPRINTF:
10637 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10638 ignore, fcode);
10640 case BUILT_IN_FPRINTF_CHK:
10641 case BUILT_IN_VFPRINTF_CHK:
10642 if (!validate_arg (arg1, INTEGER_TYPE)
10643 || TREE_SIDE_EFFECTS (arg1))
10644 return NULL_TREE;
10645 else
10646 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10647 ignore, fcode);
10649 case BUILT_IN_EXPECT:
10650 return fold_builtin_expect (loc, arg0, arg1, arg2);
10652 case BUILT_IN_ADD_OVERFLOW:
10653 case BUILT_IN_SUB_OVERFLOW:
10654 case BUILT_IN_MUL_OVERFLOW:
10655 case BUILT_IN_SADD_OVERFLOW:
10656 case BUILT_IN_SADDL_OVERFLOW:
10657 case BUILT_IN_SADDLL_OVERFLOW:
10658 case BUILT_IN_SSUB_OVERFLOW:
10659 case BUILT_IN_SSUBL_OVERFLOW:
10660 case BUILT_IN_SSUBLL_OVERFLOW:
10661 case BUILT_IN_SMUL_OVERFLOW:
10662 case BUILT_IN_SMULL_OVERFLOW:
10663 case BUILT_IN_SMULLL_OVERFLOW:
10664 case BUILT_IN_UADD_OVERFLOW:
10665 case BUILT_IN_UADDL_OVERFLOW:
10666 case BUILT_IN_UADDLL_OVERFLOW:
10667 case BUILT_IN_USUB_OVERFLOW:
10668 case BUILT_IN_USUBL_OVERFLOW:
10669 case BUILT_IN_USUBLL_OVERFLOW:
10670 case BUILT_IN_UMUL_OVERFLOW:
10671 case BUILT_IN_UMULL_OVERFLOW:
10672 case BUILT_IN_UMULLL_OVERFLOW:
10673 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10675 default:
10676 break;
10678 return NULL_TREE;
10681 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10682 ARG2, and ARG3. IGNORE is true if the result of the function call is
10683 ignored. This function returns NULL_TREE if no simplification was
10684 possible. */
10686 static tree
10687 fold_builtin_4 (location_t loc, tree fndecl,
10688 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10690 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10692 switch (fcode)
10694 case BUILT_IN_FPRINTF_CHK:
10695 case BUILT_IN_VFPRINTF_CHK:
10696 if (!validate_arg (arg1, INTEGER_TYPE)
10697 || TREE_SIDE_EFFECTS (arg1))
10698 return NULL_TREE;
10699 else
10700 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10701 ignore, fcode);
10702 break;
10704 default:
10705 break;
10707 return NULL_TREE;
10710 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10711 arguments, where NARGS <= 4. IGNORE is true if the result of the
10712 function call is ignored. This function returns NULL_TREE if no
10713 simplification was possible. Note that this only folds builtins with
10714 fixed argument patterns. Foldings that do varargs-to-varargs
10715 transformations, or that match calls with more than 4 arguments,
10716 need to be handled with fold_builtin_varargs instead. */
10718 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10720 tree
10721 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10723 tree ret = NULL_TREE;
10725 switch (nargs)
10727 case 0:
10728 ret = fold_builtin_0 (loc, fndecl, ignore);
10729 break;
10730 case 1:
10731 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10732 break;
10733 case 2:
10734 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10735 break;
10736 case 3:
10737 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10738 break;
10739 case 4:
10740 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10741 ignore);
10742 break;
10743 default:
10744 break;
10746 if (ret)
10748 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10749 SET_EXPR_LOCATION (ret, loc);
10750 TREE_NO_WARNING (ret) = 1;
10751 return ret;
10753 return NULL_TREE;
10756 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10757 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10758 of arguments in ARGS to be omitted. OLDNARGS is the number of
10759 elements in ARGS. */
10761 static tree
10762 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10763 int skip, tree fndecl, int n, va_list newargs)
10765 int nargs = oldnargs - skip + n;
10766 tree *buffer;
10768 if (n > 0)
10770 int i, j;
10772 buffer = XALLOCAVEC (tree, nargs);
10773 for (i = 0; i < n; i++)
10774 buffer[i] = va_arg (newargs, tree);
10775 for (j = skip; j < oldnargs; j++, i++)
10776 buffer[i] = args[j];
10778 else
10779 buffer = args + skip;
10781 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10784 /* Return true if FNDECL shouldn't be folded right now.
10785 If a built-in function has an inline attribute always_inline
10786 wrapper, defer folding it until after always_inline functions have
10787 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10788 might not be performed. */
10790 bool
10791 avoid_folding_inline_builtin (tree fndecl)
10793 return (DECL_DECLARED_INLINE_P (fndecl)
10794 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10795 && cfun
10796 && !cfun->always_inline_functions_inlined
10797 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
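/* A sketch of the pattern this protects, simplified from glibc's
   -D_FORTIFY_SOURCE headers (the exact attributes and object-size
   level vary): the wrapper bears the library function's own name, so
   folding a call to it before the wrapper body is inlined would lose
   the object-size check.  */

extern __inline __attribute__ ((__always_inline__, __gnu_inline__)) char *
strcpy (char *dest, const char *src)
{
  return __builtin___strcpy_chk (dest, src, __builtin_object_size (dest, 1));
}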
10800 /* A wrapper function for builtin folding that prevents warnings for
10801 "statement without effect" and the like, caused by removing the
10802 call node earlier than the warning is generated. */
10804 tree
10805 fold_call_expr (location_t loc, tree exp, bool ignore)
10807 tree ret = NULL_TREE;
10808 tree fndecl = get_callee_fndecl (exp);
10809 if (fndecl
10810 && TREE_CODE (fndecl) == FUNCTION_DECL
10811 && DECL_BUILT_IN (fndecl)
10812 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10813 yet. Defer folding until we see all the arguments
10814 (after inlining). */
10815 && !CALL_EXPR_VA_ARG_PACK (exp))
10817 int nargs = call_expr_nargs (exp);
10819 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10820 instead last argument is __builtin_va_arg_pack (). Defer folding
10821 even in that case, until arguments are finalized. */
10822 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10824 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10825 if (fndecl2
10826 && TREE_CODE (fndecl2) == FUNCTION_DECL
10827 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10828 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10829 return NULL_TREE;
10832 if (avoid_folding_inline_builtin (fndecl))
10833 return NULL_TREE;
10835 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10836 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10837 CALL_EXPR_ARGP (exp), ignore);
10838 else
10840 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10842 tree *args = CALL_EXPR_ARGP (exp);
10843 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10845 if (!ret)
10846 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10847 if (ret)
10848 return ret;
10851 return NULL_TREE;
10854 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10855 N arguments are passed in the array ARGARRAY. */
10857 tree
10858 fold_builtin_call_array (location_t loc, tree type,
10859 tree fn,
10860 int n,
10861 tree *argarray)
10863 tree ret = NULL_TREE;
10864 tree exp;
10866 if (TREE_CODE (fn) == ADDR_EXPR)
10868 tree fndecl = TREE_OPERAND (fn, 0);
10869 if (TREE_CODE (fndecl) == FUNCTION_DECL
10870 && DECL_BUILT_IN (fndecl))
10872 /* If last argument is __builtin_va_arg_pack (), arguments to this
10873 function are not finalized yet. Defer folding until they are. */
10874 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10876 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10877 if (fndecl2
10878 && TREE_CODE (fndecl2) == FUNCTION_DECL
10879 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10880 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10881 return build_call_array_loc (loc, type, fn, n, argarray);
10883 if (avoid_folding_inline_builtin (fndecl))
10884 return build_call_array_loc (loc, type, fn, n, argarray);
10885 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10887 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10888 if (ret)
10889 return ret;
10891 return build_call_array_loc (loc, type, fn, n, argarray);
10893 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10895 /* First try the transformations that don't require consing up
10896 an exp. */
10897 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10898 if (ret)
10899 return ret;
10902 /* If we got this far, we need to build an exp. */
10903 exp = build_call_array_loc (loc, type, fn, n, argarray);
10904 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10905 return ret ? ret : exp;
10909 return build_call_array_loc (loc, type, fn, n, argarray);
10912 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10913 along with N new arguments specified as the "..." parameters. SKIP
10914 is the number of arguments in EXP to be omitted. This function is used
10915 to do varargs-to-varargs transformations. */
10917 static tree
10918 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10920 va_list ap;
10921 tree t;
10923 va_start (ap, n);
10924 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10925 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10926 va_end (ap);
10928 return t;
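/* An illustrative, hypothetical use of rewrite_call_expr: to turn
   __sprintf_chk (dest, flag, size, "%s", str) into strcpy (dest, str)
   a caller could write

     rewrite_call_expr (loc, exp, 4, strcpy_decl, 1, dest);

   i.e. prepend the single new argument DEST and keep the tail of
   EXP's arguments after skipping the first four (here just STR).  */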
10931 /* Validate a single argument ARG against a tree code CODE representing
10932 a type. */
10934 static bool
10935 validate_arg (const_tree arg, enum tree_code code)
10937 if (!arg)
10938 return false;
10939 else if (code == POINTER_TYPE)
10940 return POINTER_TYPE_P (TREE_TYPE (arg));
10941 else if (code == INTEGER_TYPE)
10942 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10943 return code == TREE_CODE (TREE_TYPE (arg));
10946 /* This function validates the types of a function call argument list
10947 against a specified list of tree_codes. If the last specifier is a 0,
10948 that represents an ellipsis; otherwise the last specifier must be a
10949 VOID_TYPE.
10951 This is the GIMPLE version of validate_arglist. Eventually we want to
10952 convert builtins.c to work entirely on GIMPLE, at which point the
10953 tree-based validate_arglist will be removed. */
10955 bool
10956 validate_gimple_arglist (const gcall *call, ...)
10958 enum tree_code code;
10959 bool res = false;
10960 va_list ap;
10961 const_tree arg;
10962 size_t i;
10964 va_start (ap, call);
10965 i = 0;
10969 code = (enum tree_code) va_arg (ap, int);
10970 switch (code)
10972 case 0:
10973 /* This signifies an ellipsis; any further arguments are all ok. */
10974 res = true;
10975 goto end;
10976 case VOID_TYPE:
10977 /* This signifies an endlink, if no arguments remain, return
10978 true, otherwise return false. */
10979 res = (i == gimple_call_num_args (call));
10980 goto end;
10981 default:
10982 /* If no parameters remain or the parameter's code does not
10983 match the specified code, return false. Otherwise continue
10984 checking any remaining arguments. */
10985 arg = gimple_call_arg (call, i++);
10986 if (!validate_arg (arg, code))
10987 goto end;
10988 break;
10991 while (1);
10993 /* We use gotos here so that va_end is reached on every path out
10994 of the loop. */
10995 end: ;
10996 va_end (ap);
10998 return res;
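/* An illustrative sketch: a memcpy-like call would be checked with

     validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
                              INTEGER_TYPE, VOID_TYPE);

   where the trailing VOID_TYPE terminates the expected list; a
   trailing 0 instead would accept any further arguments.  */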
11001 /* Default target-specific builtin expander that does nothing. */
11003 rtx
11004 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11005 rtx target ATTRIBUTE_UNUSED,
11006 rtx subtarget ATTRIBUTE_UNUSED,
11007 machine_mode mode ATTRIBUTE_UNUSED,
11008 int ignore ATTRIBUTE_UNUSED)
11010 return NULL_RTX;
11013 /* Returns true if EXP represents data that would potentially reside
11014 in a readonly section. */
11016 bool
11017 readonly_data_expr (tree exp)
11019 STRIP_NOPS (exp);
11021 if (TREE_CODE (exp) != ADDR_EXPR)
11022 return false;
11024 exp = get_base_address (TREE_OPERAND (exp, 0));
11025 if (!exp)
11026 return false;
11028 /* Make sure we call decl_readonly_section only for trees it
11029 can handle (since it returns true for everything it doesn't
11030 understand). */
11031 if (TREE_CODE (exp) == STRING_CST
11032 || TREE_CODE (exp) == CONSTRUCTOR
11033 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11034 return decl_readonly_section (exp, 0);
11035 else
11036 return false;
11039 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11040 to the call, and TYPE is its return type.
11042 Return NULL_TREE if no simplification was possible, otherwise return the
11043 simplified form of the call as a tree.
11045 The simplified form may be a constant or other expression which
11046 computes the same value, but in a more efficient manner (including
11047 calls to other builtin functions).
11049 The call may contain arguments which need to be evaluated, but
11050 which are not useful to determine the result of the call. In
11051 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11052 COMPOUND_EXPR will be an argument which must be evaluated.
11053 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11054 COMPOUND_EXPR in the chain will contain the tree for the simplified
11055 form of the builtin function call. */
11057 static tree
11058 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11060 if (!validate_arg (s1, POINTER_TYPE)
11061 || !validate_arg (s2, POINTER_TYPE))
11062 return NULL_TREE;
11063 else
11065 tree fn;
11066 const char *p1, *p2;
11068 p2 = c_getstr (s2);
11069 if (p2 == NULL)
11070 return NULL_TREE;
11072 p1 = c_getstr (s1);
11073 if (p1 != NULL)
11075 const char *r = strstr (p1, p2);
11076 tree tem;
11078 if (r == NULL)
11079 return build_int_cst (TREE_TYPE (s1), 0);
11081 /* Return an offset into the constant string argument. */
11082 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11083 return fold_convert_loc (loc, type, tem);
11086 /* The argument is const char *, and the result is char *, so we need
11087 a type conversion here to avoid a warning. */
11088 if (p2[0] == '\0')
11089 return fold_convert_loc (loc, type, s1);
11091 if (p2[1] != '\0')
11092 return NULL_TREE;
11094 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11095 if (!fn)
11096 return NULL_TREE;
11098 /* New argument list transforming strstr(s1, s2) to
11099 strchr(s1, s2[0]). */
11100 return build_call_expr_loc (loc, fn, 2, s1,
11101 build_int_cst (integer_type_node, p2[0]));
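/* An illustrative summary of the strstr folds above, assuming S has
   no side effects:

     strstr (s, "")          ->  (char *) s
     strstr (s, "c")         ->  strchr (s, 'c')
     strstr ("abcde", "cd")  ->  "abcde" + 2
     strstr ("abcde", "x")   ->  (char *) 0  */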
11105 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11106 the call, and TYPE is its return type.
11108 Return NULL_TREE if no simplification was possible, otherwise return the
11109 simplified form of the call as a tree.
11111 The simplified form may be a constant or other expression which
11112 computes the same value, but in a more efficient manner (including
11113 calls to other builtin functions).
11115 The call may contain arguments which need to be evaluated, but
11116 which are not useful to determine the result of the call. In
11117 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11118 COMPOUND_EXPR will be an argument which must be evaluated.
11119 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11120 COMPOUND_EXPR in the chain will contain the tree for the simplified
11121 form of the builtin function call. */
11123 static tree
11124 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11126 if (!validate_arg (s1, POINTER_TYPE)
11127 || !validate_arg (s2, INTEGER_TYPE))
11128 return NULL_TREE;
11129 else
11131 const char *p1;
11133 if (TREE_CODE (s2) != INTEGER_CST)
11134 return NULL_TREE;
11136 p1 = c_getstr (s1);
11137 if (p1 != NULL)
11139 char c;
11140 const char *r;
11141 tree tem;
11143 if (target_char_cast (s2, &c))
11144 return NULL_TREE;
11146 r = strchr (p1, c);
11148 if (r == NULL)
11149 return build_int_cst (TREE_TYPE (s1), 0);
11151 /* Return an offset into the constant string argument. */
11152 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11153 return fold_convert_loc (loc, type, tem);
11155 return NULL_TREE;
11159 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11160 the call, and TYPE is its return type.
11162 Return NULL_TREE if no simplification was possible, otherwise return the
11163 simplified form of the call as a tree.
11165 The simplified form may be a constant or other expression which
11166 computes the same value, but in a more efficient manner (including
11167 calls to other builtin functions).
11169 The call may contain arguments which need to be evaluated, but
11170 which are not useful to determine the result of the call. In
11171 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11172 COMPOUND_EXPR will be an argument which must be evaluated.
11173 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11174 COMPOUND_EXPR in the chain will contain the tree for the simplified
11175 form of the builtin function call. */
11177 static tree
11178 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11180 if (!validate_arg (s1, POINTER_TYPE)
11181 || !validate_arg (s2, INTEGER_TYPE))
11182 return NULL_TREE;
11183 else
11185 tree fn;
11186 const char *p1;
11188 if (TREE_CODE (s2) != INTEGER_CST)
11189 return NULL_TREE;
11191 p1 = c_getstr (s1);
11192 if (p1 != NULL)
11194 char c;
11195 const char *r;
11196 tree tem;
11198 if (target_char_cast (s2, &c))
11199 return NULL_TREE;
11201 r = strrchr (p1, c);
11203 if (r == NULL)
11204 return build_int_cst (TREE_TYPE (s1), 0);
11206 /* Return an offset into the constant string argument. */
11207 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11208 return fold_convert_loc (loc, type, tem);
11211 if (! integer_zerop (s2))
11212 return NULL_TREE;
11214 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11215 if (!fn)
11216 return NULL_TREE;
11218 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11219 return build_call_expr_loc (loc, fn, 2, s1, s2);
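/* An illustrative summary of the strchr/strrchr folds above:

     strchr ("abcabc", 'b')   ->  "abcabc" + 1
     strrchr ("abcabc", 'b')  ->  "abcabc" + 4
     strrchr (s, '\0')        ->  strchr (s, '\0')

   The last rewrite is valid because both calls return a pointer to
   the terminating NUL.  */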
11223 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11224 to the call, and TYPE is its return type.
11226 Return NULL_TREE if no simplification was possible, otherwise return the
11227 simplified form of the call as a tree.
11229 The simplified form may be a constant or other expression which
11230 computes the same value, but in a more efficient manner (including
11231 calls to other builtin functions).
11233 The call may contain arguments which need to be evaluated, but
11234 which are not useful to determine the result of the call. In
11235 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11236 COMPOUND_EXPR will be an argument which must be evaluated.
11237 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11238 COMPOUND_EXPR in the chain will contain the tree for the simplified
11239 form of the builtin function call. */
11241 static tree
11242 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11244 if (!validate_arg (s1, POINTER_TYPE)
11245 || !validate_arg (s2, POINTER_TYPE))
11246 return NULL_TREE;
11247 else
11249 tree fn;
11250 const char *p1, *p2;
11252 p2 = c_getstr (s2);
11253 if (p2 == NULL)
11254 return NULL_TREE;
11256 p1 = c_getstr (s1);
11257 if (p1 != NULL)
11259 const char *r = strpbrk (p1, p2);
11260 tree tem;
11262 if (r == NULL)
11263 return build_int_cst (TREE_TYPE (s1), 0);
11265 /* Return an offset into the constant string argument. */
11266 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11267 return fold_convert_loc (loc, type, tem);
11270 if (p2[0] == '\0')
11271 /* strpbrk(x, "") == NULL.
11272 Evaluate and ignore s1 in case it had side-effects. */
11273 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11275 if (p2[1] != '\0')
11276 return NULL_TREE; /* Really call strpbrk. */
11278 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11279 if (!fn)
11280 return NULL_TREE;
11282 /* New argument list transforming strpbrk(s1, s2) to
11283 strchr(s1, s2[0]). */
11284 return build_call_expr_loc (loc, fn, 2, s1,
11285 build_int_cst (integer_type_node, p2[0]));
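/* An illustrative summary of the strpbrk folds above:

     strpbrk ("abcde", "dc")  ->  "abcde" + 2
     strpbrk (s, "")          ->  (char *) 0   (S is still evaluated)
     strpbrk (s, "c")         ->  strchr (s, 'c')  */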
11289 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11290 arguments to the call.
11292 Return NULL_TREE if no simplification was possible, otherwise return the
11293 simplified form of the call as a tree.
11295 The simplified form may be a constant or other expression which
11296 computes the same value, but in a more efficient manner (including
11297 calls to other builtin functions).
11299 The call may contain arguments which need to be evaluated, but
11300 which are not useful to determine the result of the call. In
11301 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11302 COMPOUND_EXPR will be an argument which must be evaluated.
11303 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11304 COMPOUND_EXPR in the chain will contain the tree for the simplified
11305 form of the builtin function call. */
11307 static tree
11308 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11310 if (!validate_arg (dst, POINTER_TYPE)
11311 || !validate_arg (src, POINTER_TYPE)
11312 || !validate_arg (len, INTEGER_TYPE))
11313 return NULL_TREE;
11314 else
11316 const char *p = c_getstr (src);
11318 /* If the requested length is zero, or the src parameter string
11319 length is zero, return the dst parameter. */
11320 if (integer_zerop (len) || (p && *p == '\0'))
11321 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11323 /* If the requested len is greater than or equal to the string
11324 length, call strcat. */
11325 if (TREE_CODE (len) == INTEGER_CST && p
11326 && compare_tree_int (len, strlen (p)) >= 0)
11328 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11330 /* If the replacement _DECL isn't initialized, don't do the
11331 transformation. */
11332 if (!fn)
11333 return NULL_TREE;
11335 return build_call_expr_loc (loc, fn, 2, dst, src);
11337 return NULL_TREE;
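/* An illustrative summary of the strncat folds above:

     strncat (d, s, 0)     ->  d   (D and S are still evaluated)
     strncat (d, "", n)    ->  d
     strncat (d, "ab", 5)  ->  strcat (d, "ab")   since 5 >= strlen ("ab")  */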
11341 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11342 to the call.
11344 Return NULL_TREE if no simplification was possible, otherwise return the
11345 simplified form of the call as a tree.
11347 The simplified form may be a constant or other expression which
11348 computes the same value, but in a more efficient manner (including
11349 calls to other builtin functions).
11351 The call may contain arguments which need to be evaluated, but
11352 which are not useful to determine the result of the call. In
11353 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11354 COMPOUND_EXPR will be an argument which must be evaluated.
11355 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11356 COMPOUND_EXPR in the chain will contain the tree for the simplified
11357 form of the builtin function call. */
11359 static tree
11360 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11362 if (!validate_arg (s1, POINTER_TYPE)
11363 || !validate_arg (s2, POINTER_TYPE))
11364 return NULL_TREE;
11365 else
11367 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11369 /* If both arguments are constants, evaluate at compile-time. */
11370 if (p1 && p2)
11372 const size_t r = strspn (p1, p2);
11373 return build_int_cst (size_type_node, r);
11376 /* If either argument is "", the result is 0. */
11377 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11378 /* Evaluate and ignore both arguments in case either one has
11379 side-effects. */
11380 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11381 s1, s2);
11382 return NULL_TREE;
11386 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11387 to the call.
11389 Return NULL_TREE if no simplification was possible, otherwise return the
11390 simplified form of the call as a tree.
11392 The simplified form may be a constant or other expression which
11393 computes the same value, but in a more efficient manner (including
11394 calls to other builtin functions).
11396 The call may contain arguments which need to be evaluated, but
11397 which are not useful to determine the result of the call. In
11398 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11399 COMPOUND_EXPR will be an argument which must be evaluated.
11400 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11401 COMPOUND_EXPR in the chain will contain the tree for the simplified
11402 form of the builtin function call. */
11404 static tree
11405 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11407 if (!validate_arg (s1, POINTER_TYPE)
11408 || !validate_arg (s2, POINTER_TYPE))
11409 return NULL_TREE;
11410 else
11412 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11414 /* If both arguments are constants, evaluate at compile-time. */
11415 if (p1 && p2)
11417 const size_t r = strcspn (p1, p2);
11418 return build_int_cst (size_type_node, r);
11421 /* If the first argument is "", the result is 0. */
11422 if (p1 && *p1 == '\0')
11424 /* Evaluate and ignore argument s2 in case it has
11425 side-effects. */
11426 return omit_one_operand_loc (loc, size_type_node,
11427 size_zero_node, s2);
11430 /* If the second argument is "", return __builtin_strlen(s1). */
11431 if (p2 && *p2 == '\0')
11433 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11435 /* If the replacement _DECL isn't initialized, don't do the
11436 transformation. */
11437 if (!fn)
11438 return NULL_TREE;
11440 return build_call_expr_loc (loc, fn, 1, s1);
11442 return NULL_TREE;
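/* An illustrative summary of the strspn/strcspn folds above:

     strspn ("abcba", "ab")  ->  2
     strcspn ("abcba", "c")  ->  2
     strspn (s, "")          ->  0   (both arguments still evaluated)
     strcspn (s, "")         ->  strlen (s)  */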
11446 /* Fold the next_arg or va_start call EXP. Returns true if an error
11447 was produced, false otherwise. This is done so that we don't output
11448 the error or warning more than once. */
11450 bool
11451 fold_builtin_next_arg (tree exp, bool va_start_p)
11453 tree fntype = TREE_TYPE (current_function_decl);
11454 int nargs = call_expr_nargs (exp);
11455 tree arg;
11456 /* There is a good chance the current input_location points inside the
11457 definition of the va_start macro (perhaps on the token for the
11458 builtin) in a system header, so warnings will not be emitted.
11459 Use the location in real source code. */
11460 source_location current_location =
11461 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11462 NULL);
11464 if (!stdarg_p (fntype))
11466 error ("%<va_start%> used in function with fixed args");
11467 return true;
11470 if (va_start_p)
11472 if (nargs != 2)
11474 error ("wrong number of arguments to function %<va_start%>");
11475 return true;
11477 arg = CALL_EXPR_ARG (exp, 1);
11479 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
11480 once we have checked the arguments and if needed issued a warning. */
11481 else
11483 if (nargs == 0)
11485 /* Evidently an out of date version of <stdarg.h>; can't validate
11486 va_start's second argument, but can still work as intended. */
11487 warning_at (current_location,
11488 OPT_Wvarargs,
11489 "%<__builtin_next_arg%> called without an argument");
11490 return true;
11492 else if (nargs > 1)
11494 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11495 return true;
11497 arg = CALL_EXPR_ARG (exp, 0);
11500 if (TREE_CODE (arg) == SSA_NAME)
11501 arg = SSA_NAME_VAR (arg);
11503 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11504 or __builtin_next_arg (0) the first time we see it, after checking
11505 the arguments and if needed issuing a warning. */
11506 if (!integer_zerop (arg))
11508 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11510 /* Strip off all nops for the sake of the comparison. This
11511 is not quite the same as STRIP_NOPS. It does more.
11512 We must also strip off INDIRECT_EXPR for C++ reference
11513 parameters. */
11514 while (CONVERT_EXPR_P (arg)
11515 || TREE_CODE (arg) == INDIRECT_REF)
11516 arg = TREE_OPERAND (arg, 0);
11517 if (arg != last_parm)
11519 /* FIXME: Sometimes the tree optimizers hand us something other
11520 than the last argument even though the user used the last
11521 argument. We just warn and continue, so that we do not
11522 generate wrong code because of it. */
11524 warning_at (current_location,
11525 OPT_Wvarargs,
11526 "second parameter of %<va_start%> not last named argument");
11529 /* Undefined by C99 7.15.1.4p4 (va_start):
11530 "If the parameter parmN is declared with the register storage
11531 class, with a function or array type, or with a type that is
11532 not compatible with the type that results after application of
11533 the default argument promotions, the behavior is undefined."
11535 else if (DECL_REGISTER (arg))
11537 warning_at (current_location,
11538 OPT_Wvarargs,
11539 "undefined behaviour when second parameter of "
11540 "%<va_start%> is declared with %<register%> storage");
11543 /* We want to verify the second parameter just once before the tree
11544 optimizers are run and then avoid keeping it in the tree,
11545 as otherwise we could warn even for correct code like:
11546 void foo (int i, ...)
11547 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11548 if (va_start_p)
11549 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11550 else
11551 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11553 return false;
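/* An illustrative example of the check above: compiled with
   -Wvarargs, this warns that the second parameter of va_start is not
   the last named argument:

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);
       va_end (ap);
     }  */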
11557 /* Expand a call EXP to __builtin_object_size. */
11559 static rtx
11560 expand_builtin_object_size (tree exp)
11562 tree ost;
11563 int object_size_type;
11564 tree fndecl = get_callee_fndecl (exp);
11566 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11568 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11569 exp, fndecl);
11570 expand_builtin_trap ();
11571 return const0_rtx;
11574 ost = CALL_EXPR_ARG (exp, 1);
11575 STRIP_NOPS (ost);
11577 if (TREE_CODE (ost) != INTEGER_CST
11578 || tree_int_cst_sgn (ost) < 0
11579 || compare_tree_int (ost, 3) > 0)
11581 error ("%Klast argument of %D is not integer constant between 0 and 3",
11582 exp, fndecl);
11583 expand_builtin_trap ();
11584 return const0_rtx;
11587 object_size_type = tree_to_shwi (ost);
11589 return object_size_type < 2 ? constm1_rtx : const0_rtx;
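/* An illustrative summary of the fallback above: when nothing is
   known about the object at expansion time,

     __builtin_object_size (p, 0)  ->  (size_t) -1   (maximum estimate)
     __builtin_object_size (p, 2)  ->  (size_t) 0    (minimum estimate)

   matching the documented defaults for the two kinds of modes.  */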
11592 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11593 FCODE is the BUILT_IN_* to use.
11594 Return NULL_RTX if we failed; the caller should emit a normal call,
11595 otherwise try to get the result in TARGET, if convenient (and in
11596 mode MODE if that's convenient). */
11598 static rtx
11599 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11600 enum built_in_function fcode)
11602 tree dest, src, len, size;
11604 if (!validate_arglist (exp,
11605 POINTER_TYPE,
11606 fcode == BUILT_IN_MEMSET_CHK
11607 ? INTEGER_TYPE : POINTER_TYPE,
11608 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11609 return NULL_RTX;
11611 dest = CALL_EXPR_ARG (exp, 0);
11612 src = CALL_EXPR_ARG (exp, 1);
11613 len = CALL_EXPR_ARG (exp, 2);
11614 size = CALL_EXPR_ARG (exp, 3);
11616 if (! tree_fits_uhwi_p (size))
11617 return NULL_RTX;
11619 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11621 tree fn;
11623 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11625 warning_at (tree_nonartificial_location (exp),
11626 0, "%Kcall to %D will always overflow destination buffer",
11627 exp, get_callee_fndecl (exp));
11628 return NULL_RTX;
11631 fn = NULL_TREE;
11632 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11633 mem{cpy,pcpy,move,set} is available. */
11634 switch (fcode)
11636 case BUILT_IN_MEMCPY_CHK:
11637 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11638 break;
11639 case BUILT_IN_MEMPCPY_CHK:
11640 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11641 break;
11642 case BUILT_IN_MEMMOVE_CHK:
11643 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11644 break;
11645 case BUILT_IN_MEMSET_CHK:
11646 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11647 break;
11648 default:
11649 break;
11652 if (! fn)
11653 return NULL_RTX;
11655 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11656 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11657 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11658 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11660 else if (fcode == BUILT_IN_MEMSET_CHK)
11661 return NULL_RTX;
11662 else
11664 unsigned int dest_align = get_pointer_alignment (dest);
11666 /* If DEST is not a pointer type, call the normal function. */
11667 if (dest_align == 0)
11668 return NULL_RTX;
11670 /* If SRC and DEST are the same (and not volatile), do nothing. */
11671 if (operand_equal_p (src, dest, 0))
11673 tree expr;
11675 if (fcode != BUILT_IN_MEMPCPY_CHK)
11677 /* Evaluate and ignore LEN in case it has side-effects. */
11678 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11679 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11682 expr = fold_build_pointer_plus (dest, len);
11683 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11686 /* __memmove_chk special case. */
11687 if (fcode == BUILT_IN_MEMMOVE_CHK)
11689 unsigned int src_align = get_pointer_alignment (src);
11691 if (src_align == 0)
11692 return NULL_RTX;
11694 /* If src is categorized for a readonly section we can use
11695 normal __memcpy_chk. */
11696 if (readonly_data_expr (src))
11698 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11699 if (!fn)
11700 return NULL_RTX;
11701 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11702 dest, src, len, size);
11703 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11704 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11705 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11708 return NULL_RTX;
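/* An illustrative sketch of the expansion above: when the length is
   known to be safe,

     __builtin___memcpy_chk (d, s, 16, 32)  ->  memcpy (d, s, 16)

   whereas a constant length larger than the object size draws the
   "will always overflow" warning and the call is expanded normally,
   so the runtime check in __memcpy_chk still fires.  */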
11712 /* Emit warning if a buffer overflow is detected at compile time. */
11714 static void
11715 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11717 int is_strlen = 0;
11718 tree len, size;
11719 location_t loc = tree_nonartificial_location (exp);
11721 switch (fcode)
11723 case BUILT_IN_STRCPY_CHK:
11724 case BUILT_IN_STPCPY_CHK:
11725 /* For __strcat_chk the warning will be emitted only if overflowing
11726 by at least strlen (dest) + 1 bytes. */
11727 case BUILT_IN_STRCAT_CHK:
11728 len = CALL_EXPR_ARG (exp, 1);
11729 size = CALL_EXPR_ARG (exp, 2);
11730 is_strlen = 1;
11731 break;
11732 case BUILT_IN_STRNCAT_CHK:
11733 case BUILT_IN_STRNCPY_CHK:
11734 case BUILT_IN_STPNCPY_CHK:
11735 len = CALL_EXPR_ARG (exp, 2);
11736 size = CALL_EXPR_ARG (exp, 3);
11737 break;
11738 case BUILT_IN_SNPRINTF_CHK:
11739 case BUILT_IN_VSNPRINTF_CHK:
11740 len = CALL_EXPR_ARG (exp, 1);
11741 size = CALL_EXPR_ARG (exp, 3);
11742 break;
11743 default:
11744 gcc_unreachable ();
11747 if (!len || !size)
11748 return;
11750 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11751 return;
11753 if (is_strlen)
11755 len = c_strlen (len, 1);
11756 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11757 return;
11759 else if (fcode == BUILT_IN_STRNCAT_CHK)
11761 tree src = CALL_EXPR_ARG (exp, 1);
11762 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11763 return;
11764 src = c_strlen (src, 1);
11765 if (! src || ! tree_fits_uhwi_p (src))
11767 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11768 exp, get_callee_fndecl (exp));
11769 return;
11771 else if (tree_int_cst_lt (src, size))
11772 return;
11774 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11775 return;
11777 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11778 exp, get_callee_fndecl (exp));
11781 /* Emit warning if a buffer overflow is detected at compile time
11782 in __sprintf_chk/__vsprintf_chk calls. */
11784 static void
11785 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11787 tree size, len, fmt;
11788 const char *fmt_str;
11789 int nargs = call_expr_nargs (exp);
11791 /* Verify the required arguments in the original call. */
11793 if (nargs < 4)
11794 return;
11795 size = CALL_EXPR_ARG (exp, 2);
11796 fmt = CALL_EXPR_ARG (exp, 3);
11798 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11799 return;
11801 /* Check whether the format is a literal string constant. */
11802 fmt_str = c_getstr (fmt);
11803 if (fmt_str == NULL)
11804 return;
11806 if (!init_target_chars ())
11807 return;
11809 /* If the format doesn't contain % args or %%, we know its size. */
11810 if (strchr (fmt_str, target_percent) == 0)
11811 len = build_int_cstu (size_type_node, strlen (fmt_str));
11812 /* If the format is "%s" and the first ... argument is a string
11813 literal, we know its size too. */
11814 else if (fcode == BUILT_IN_SPRINTF_CHK
11815 && strcmp (fmt_str, target_percent_s) == 0)
11817 tree arg;
11819 if (nargs < 5)
11820 return;
11821 arg = CALL_EXPR_ARG (exp, 4);
11822 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11823 return;
11825 len = c_strlen (arg, 1);
11826 if (!len || ! tree_fits_uhwi_p (len))
11827 return;
11829 else
11830 return;
11832 if (! tree_int_cst_lt (len, size))
11833 warning_at (tree_nonartificial_location (exp),
11834 0, "%Kcall to %D will always overflow destination buffer",
11835 exp, get_callee_fndecl (exp));
11838 /* Emit warning if a free is called with address of a variable. */
11840 static void
11841 maybe_emit_free_warning (tree exp)
11843 tree arg = CALL_EXPR_ARG (exp, 0);
11845 STRIP_NOPS (arg);
11846 if (TREE_CODE (arg) != ADDR_EXPR)
11847 return;
11849 arg = get_base_address (TREE_OPERAND (arg, 0));
11850 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11851 return;
11853 if (SSA_VAR_P (arg))
11854 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11855 "%Kattempt to free a non-heap object %qD", exp, arg);
11856 else
11857 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11858 "%Kattempt to free a non-heap object", exp);
11861 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11862 if possible. */
11864 static tree
11865 fold_builtin_object_size (tree ptr, tree ost)
11867 unsigned HOST_WIDE_INT bytes;
11868 int object_size_type;
11870 if (!validate_arg (ptr, POINTER_TYPE)
11871 || !validate_arg (ost, INTEGER_TYPE))
11872 return NULL_TREE;
11874 STRIP_NOPS (ost);
11876 if (TREE_CODE (ost) != INTEGER_CST
11877 || tree_int_cst_sgn (ost) < 0
11878 || compare_tree_int (ost, 3) > 0)
11879 return NULL_TREE;
11881 object_size_type = tree_to_shwi (ost);
11883 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11884 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11885 and (size_t) 0 for types 2 and 3. */
11886 if (TREE_SIDE_EFFECTS (ptr))
11887 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11889 if (TREE_CODE (ptr) == ADDR_EXPR)
11891 bytes = compute_builtin_object_size (ptr, object_size_type);
11892 if (wi::fits_to_tree_p (bytes, size_type_node))
11893 return build_int_cstu (size_type_node, bytes);
11895 else if (TREE_CODE (ptr) == SSA_NAME)
11897 /* If object size is not known yet, delay folding until
11898 later. Maybe subsequent passes will help determining
11899 it. */
11900 bytes = compute_builtin_object_size (ptr, object_size_type);
11901 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11902 && wi::fits_to_tree_p (bytes, size_type_node))
11903 return build_int_cstu (size_type_node, bytes);
11906 return NULL_TREE;
11909 /* Builtins with folding operations that operate on "..." arguments
11910 need special handling; we need to store the arguments in a convenient
11911 data structure before attempting any folding. Fortunately there are
11912 only a few builtins that fall into this category. FNDECL is the
11913 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11914 result of the function call is ignored. */
11916 static tree
11917 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11918 bool ignore ATTRIBUTE_UNUSED)
11920 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11921 tree ret = NULL_TREE;
11923 switch (fcode)
11925 case BUILT_IN_FPCLASSIFY:
11926 ret = fold_builtin_fpclassify (loc, exp);
11927 break;
11929 default:
11930 break;
11932 if (ret)
11934 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11935 SET_EXPR_LOCATION (ret, loc);
11936 TREE_NO_WARNING (ret) = 1;
11937 return ret;
11939 return NULL_TREE;
11942 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
11943 FMT and ARG are the arguments to the call; we don't fold cases with
11944 more than 2 arguments, and ARG may be null if this is a 1-argument case.
11946 Return NULL_TREE if no simplification was possible, otherwise return the
11947 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11948 code of the function to be simplified. */
11950 static tree
11951 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
11952 tree arg, bool ignore,
11953 enum built_in_function fcode)
11955 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
11956 const char *fmt_str = NULL;
11958 /* If the return value is used, don't do the transformation. */
11959 if (! ignore)
11960 return NULL_TREE;
11962 /* Verify the required arguments in the original call. */
11963 if (!validate_arg (fmt, POINTER_TYPE))
11964 return NULL_TREE;
11966 /* Check whether the format is a literal string constant. */
11967 fmt_str = c_getstr (fmt);
11968 if (fmt_str == NULL)
11969 return NULL_TREE;
11971 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
11973 /* If we're using an unlocked function, assume the other
11974 unlocked functions exist explicitly. */
11975 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
11976 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
11978 else
11980 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
11981 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
11984 if (!init_target_chars ())
11985 return NULL_TREE;
11987 if (strcmp (fmt_str, target_percent_s) == 0
11988 || strchr (fmt_str, target_percent) == NULL)
11990 const char *str;
11992 if (strcmp (fmt_str, target_percent_s) == 0)
11994 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11995 return NULL_TREE;
11997 if (!arg || !validate_arg (arg, POINTER_TYPE))
11998 return NULL_TREE;
12000 str = c_getstr (arg);
12001 if (str == NULL)
12002 return NULL_TREE;
12004 else
12006 /* The format specifier doesn't contain any '%' characters. */
12007 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12008 && arg)
12009 return NULL_TREE;
12010 str = fmt_str;
12013 /* If the string was "", printf does nothing. */
12014 if (str[0] == '\0')
12015 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12017 /* If the string has length 1, call putchar. */
12018 if (str[1] == '\0')
12020 /* Given printf ("c"), where c is any single character, convert
12021 "c"[0] to an int and pass that to the replacement
12022 function. */
12023 newarg = build_int_cst (integer_type_node, str[0]);
12024 if (fn_putchar)
12025 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12027 else
12029 /* If the string was "string\n", call puts("string"). */
12030 size_t len = strlen (str);
12031 if ((unsigned char)str[len - 1] == target_newline
12032 && (size_t) (int) len == len
12033 && (int) len > 0)
12035 char *newstr;
12036 tree offset_node, string_cst;
12038 /* Create a NUL-terminated string that's one char shorter
12039 than the original, stripping off the trailing '\n'. */
12040 newarg = build_string_literal (len, str);
12041 string_cst = string_constant (newarg, &offset_node);
12042 gcc_checking_assert (string_cst
12043 && (TREE_STRING_LENGTH (string_cst)
12044 == (int) len)
12045 && integer_zerop (offset_node)
12046 && (unsigned char)
12047 TREE_STRING_POINTER (string_cst)[len - 1]
12048 == target_newline);
12049 /* build_string_literal creates a new STRING_CST,
12050 modify it in place to avoid double copying. */
12051 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
12052 newstr[len - 1] = '\0';
12053 if (fn_puts)
12054 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12056 else
12057 /* We'd like to arrange to call fputs(string,stdout) here,
12058 but we need stdout and don't have a way to get it yet. */
12059 return NULL_TREE;
12063 /* The other optimizations can be done only on the non-va_list variants. */
12064 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12065 return NULL_TREE;
12067 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12068 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12070 if (!arg || !validate_arg (arg, POINTER_TYPE))
12071 return NULL_TREE;
12072 if (fn_puts)
12073 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12076 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12077 else if (strcmp (fmt_str, target_percent_c) == 0)
12079 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12080 return NULL_TREE;
12081 if (fn_putchar)
12082 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12085 if (!call)
12086 return NULL_TREE;
12088 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
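/* An illustrative summary of the printf rewrites above, all applied
   only when the return value is ignored:

     printf ("")          ->  0   (no call emitted)
     printf ("x")         ->  putchar ('x')
     printf ("hello\n")   ->  puts ("hello")
     printf ("%s\n", s)   ->  puts (s)
     printf ("%c", c)     ->  putchar (c)  */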
12091 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12092 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12093 more than 3 arguments, and ARG may be null in the 2-argument case.
12095 Return NULL_TREE if no simplification was possible, otherwise return the
12096 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12097 code of the function to be simplified. */
12099 static tree
12100 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12101 tree fmt, tree arg, bool ignore,
12102 enum built_in_function fcode)
12104 tree fn_fputc, fn_fputs, call = NULL_TREE;
12105 const char *fmt_str = NULL;
12107 /* If the return value is used, don't do the transformation. */
12108 if (! ignore)
12109 return NULL_TREE;
12111 /* Verify the required arguments in the original call. */
12112 if (!validate_arg (fp, POINTER_TYPE))
12113 return NULL_TREE;
12114 if (!validate_arg (fmt, POINTER_TYPE))
12115 return NULL_TREE;
12117 /* Check whether the format is a literal string constant. */
12118 fmt_str = c_getstr (fmt);
12119 if (fmt_str == NULL)
12120 return NULL_TREE;
12122 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12124 /* If we're using an unlocked function, assume the other
12125 unlocked functions exist explicitly. */
12126 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
12127 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
12129 else
12131 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
12132 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
12135 if (!init_target_chars ())
12136 return NULL_TREE;
12138 /* If the format doesn't contain % args or %%, use fputs. */
12139 if (strchr (fmt_str, target_percent) == NULL)
12141 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12142 && arg)
12143 return NULL_TREE;
12145 /* If the format specifier was "", fprintf does nothing. */
12146 if (fmt_str[0] == '\0')
12148 /* If FP has side-effects, just wait until gimplification is
12149 done. */
12150 if (TREE_SIDE_EFFECTS (fp))
12151 return NULL_TREE;
12153 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12156 /* When "string" doesn't contain %, replace all cases of
12157 fprintf (fp, string) with fputs (string, fp). The fputs
12158 builtin will take care of special cases like length == 1. */
12159 if (fn_fputs)
12160 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12163 /* The other optimizations can be done only on the non-va_list variants. */
12164 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12165 return NULL_TREE;
12167 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12168 else if (strcmp (fmt_str, target_percent_s) == 0)
12170 if (!arg || !validate_arg (arg, POINTER_TYPE))
12171 return NULL_TREE;
12172 if (fn_fputs)
12173 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12176 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12177 else if (strcmp (fmt_str, target_percent_c) == 0)
12179 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12180 return NULL_TREE;
12181 if (fn_fputc)
12182 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12185 if (!call)
12186 return NULL_TREE;
12187 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
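/* An illustrative summary of the fprintf rewrites above, likewise
   only when the return value is ignored:

     fprintf (fp, "")        ->  0   (if FP has no side effects)
     fprintf (fp, "hello")   ->  fputs ("hello", fp)
     fprintf (fp, "%s", s)   ->  fputs (s, fp)
     fprintf (fp, "%c", c)   ->  fputc (c, fp)  */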
12190 /* Initialize format string characters in the target charset. */
12192 bool
12193 init_target_chars (void)
12195 static bool init;
12196 if (!init)
12198 target_newline = lang_hooks.to_target_charset ('\n');
12199 target_percent = lang_hooks.to_target_charset ('%');
12200 target_c = lang_hooks.to_target_charset ('c');
12201 target_s = lang_hooks.to_target_charset ('s');
12202 if (target_newline == 0 || target_percent == 0 || target_c == 0
12203 || target_s == 0)
12204 return false;
12206 target_percent_c[0] = target_percent;
12207 target_percent_c[1] = target_c;
12208 target_percent_c[2] = '\0';
12210 target_percent_s[0] = target_percent;
12211 target_percent_s[1] = target_s;
12212 target_percent_s[2] = '\0';
12214 target_percent_s_newline[0] = target_percent;
12215 target_percent_s_newline[1] = target_s;
12216 target_percent_s_newline[2] = target_newline;
12217 target_percent_s_newline[3] = '\0';
12219 init = true;
12221 return true;
12224 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12225 and no overflow/underflow occurred. INEXACT is true if M was not
12226 exactly calculated. TYPE is the tree type for the result. This
12227 function assumes that you cleared the MPFR flags and then
12228 calculated M to see if anything subsequently set a flag prior to
12229 entering this function. Return NULL_TREE if any checks fail. */
12231 static tree
12232 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12234 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12235 overflow/underflow occurred. If -frounding-math, proceed iff the
12236 result of calling FUNC was exact. */
12237 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12238 && (!flag_rounding_math || !inexact))
12240 REAL_VALUE_TYPE rr;
12242 real_from_mpfr (&rr, m, type, GMP_RNDN);
12243 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12244 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12245 but the mpfr_t is not, then we underflowed in the
12246 conversion. */
12247 if (real_isfinite (&rr)
12248 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12250 REAL_VALUE_TYPE rmode;
12252 real_convert (&rmode, TYPE_MODE (type), &rr);
12253 /* Proceed iff the specified mode can hold the value. */
12254 if (real_identical (&rmode, &rr))
12255 return build_real (type, rmode);
12258 return NULL_TREE;
12261 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12262 number and no overflow/underflow occurred. INEXACT is true if M
12263 was not exactly calculated. TYPE is the tree type for the result.
12264 This function assumes that you cleared the MPFR flags and then
12265 calculated M to see if anything subsequently set a flag prior to
12266 entering this function. Return NULL_TREE if any checks fail; if
12267 FORCE_CONVERT is true, the checks are bypassed. */
12269 static tree
12270 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12272 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12273 overflow/underflow occurred. If -frounding-math, proceed iff the
12274 result of calling FUNC was exact. */
12275 if (force_convert
12276 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12277 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12278 && (!flag_rounding_math || !inexact)))
12280 REAL_VALUE_TYPE re, im;
12282 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12283 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12284 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12285 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12286 but the mpfr_t is not, then we underflowed in the
12287 conversion. */
12288 if (force_convert
12289 || (real_isfinite (&re) && real_isfinite (&im)
12290 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12291 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12293 REAL_VALUE_TYPE re_mode, im_mode;
12295 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12296 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12297 /* Proceed iff the specified mode can hold the value. */
12298 if (force_convert
12299 || (real_identical (&re_mode, &re)
12300 && real_identical (&im_mode, &im)))
12301 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12302 build_real (TREE_TYPE (type), im_mode));
12305 return NULL_TREE;
12308 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12309 FUNC on it and return the resulting value as a tree with type TYPE.
12310 If MIN and/or MAX are not NULL, then the supplied ARG must be
12311 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12312 acceptable values, otherwise they are not. The mpfr precision is
12313 set to the precision of TYPE. We assume that function FUNC returns
12314 zero if the result could be calculated exactly within the requested
12315 precision. */
12317 static tree
12318 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12319 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12320 bool inclusive)
12322 tree result = NULL_TREE;
12324 STRIP_NOPS (arg);
12326 /* To proceed, MPFR must exactly represent the target floating point
12327 format, which only happens when the target base equals two. */
12328 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12329 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12331 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12333 if (real_isfinite (ra)
12334 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12335 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12337 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12338 const int prec = fmt->p;
12339 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12340 int inexact;
12341 mpfr_t m;
12343 mpfr_init2 (m, prec);
12344 mpfr_from_real (m, ra, GMP_RNDN);
12345 mpfr_clear_flags ();
12346 inexact = func (m, m, rnd);
12347 result = do_mpfr_ckconv (m, type, inexact);
12348 mpfr_clear (m);
12352 return result;
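/* An illustrative sketch of how the constant folders use
   do_mpfr_arg1: a call such as __builtin_sin (1.0) is evaluated as

     do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false);

   i.e. sin is computed by MPFR at TYPE's precision with no domain
   bounds, and the result comes back as a REAL_CST (or NULL_TREE if
   the checks in do_mpfr_ckconv fail).  */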
12355 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12356 FUNC on it and return the resulting value as a tree with type TYPE.
12357 The mpfr precision is set to the precision of TYPE. We assume that
12358 function FUNC returns zero if the result could be calculated
12359 exactly within the requested precision. */
12361 static tree
12362 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12363 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12365 tree result = NULL_TREE;
12367 STRIP_NOPS (arg1);
12368 STRIP_NOPS (arg2);
12370 /* To proceed, MPFR must exactly represent the target floating point
12371 format, which only happens when the target base equals two. */
12372 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12373 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12374 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12376 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12377 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12379 if (real_isfinite (ra1) && real_isfinite (ra2))
12381 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12382 const int prec = fmt->p;
12383 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12384 int inexact;
12385 mpfr_t m1, m2;
12387 mpfr_inits2 (prec, m1, m2, NULL);
12388 mpfr_from_real (m1, ra1, GMP_RNDN);
12389 mpfr_from_real (m2, ra2, GMP_RNDN);
12390 mpfr_clear_flags ();
12391 inexact = func (m1, m1, m2, rnd);
12392 result = do_mpfr_ckconv (m1, type, inexact);
12393 mpfr_clears (m1, m2, NULL);
12397 return result;
12400 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12401 FUNC on it and return the resulting value as a tree with type TYPE.
12402 The mpfr precision is set to the precision of TYPE. We assume that
12403 function FUNC returns zero if the result could be calculated
12404 exactly within the requested precision. */
12406 static tree
12407 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12408 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12410 tree result = NULL_TREE;
12412 STRIP_NOPS (arg1);
12413 STRIP_NOPS (arg2);
12414 STRIP_NOPS (arg3);
12416 /* To proceed, MPFR must exactly represent the target floating point
12417 format, which only happens when the target base equals two. */
12418 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12419 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12420 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12421 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12423 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12424 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12425 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12427 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12429 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12430 const int prec = fmt->p;
12431 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12432 int inexact;
12433 mpfr_t m1, m2, m3;
12435 mpfr_inits2 (prec, m1, m2, m3, NULL);
12436 mpfr_from_real (m1, ra1, GMP_RNDN);
12437 mpfr_from_real (m2, ra2, GMP_RNDN);
12438 mpfr_from_real (m3, ra3, GMP_RNDN);
12439 mpfr_clear_flags ();
12440 inexact = func (m1, m1, m2, m3, rnd);
12441 result = do_mpfr_ckconv (m1, type, inexact);
12442 mpfr_clears (m1, m2, m3, NULL);
12446 return result;
12449 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12450 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12451 If ARG_SINP and ARG_COSP are NULL then the result is returned
12452 as a complex value.
12453 The type is taken from the type of ARG and is used for setting the
12454 precision of the calculation and results. */
12456 static tree
12457 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12459 tree const type = TREE_TYPE (arg);
12460 tree result = NULL_TREE;
12462 STRIP_NOPS (arg);
12464 /* To proceed, MPFR must exactly represent the target floating point
12465 format, which only happens when the target base equals two. */
12466 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12467 && TREE_CODE (arg) == REAL_CST
12468 && !TREE_OVERFLOW (arg))
12470 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12472 if (real_isfinite (ra))
12474 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12475 const int prec = fmt->p;
12476 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12477 tree result_s, result_c;
12478 int inexact;
12479 mpfr_t m, ms, mc;
12481 mpfr_inits2 (prec, m, ms, mc, NULL);
12482 mpfr_from_real (m, ra, GMP_RNDN);
12483 mpfr_clear_flags ();
12484 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12485 result_s = do_mpfr_ckconv (ms, type, inexact);
12486 result_c = do_mpfr_ckconv (mc, type, inexact);
12487 mpfr_clears (m, ms, mc, NULL);
12488 if (result_s && result_c)
12490 /* If we are to return the result as a complex value, do so. */
12491 if (!arg_sinp && !arg_cosp)
12492 return build_complex (build_complex_type (type),
12493 result_c, result_s);
12495 /* Dereference the sin/cos pointer arguments. */
12496 arg_sinp = build_fold_indirect_ref (arg_sinp);
12497 arg_cosp = build_fold_indirect_ref (arg_cosp);
12498 /* Proceed iff valid pointer types were passed in. */
12499 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12500 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12502 /* Set the values. */
12503 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12504 result_s);
12505 TREE_SIDE_EFFECTS (result_s) = 1;
12506 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12507 result_c);
12508 TREE_SIDE_EFFECTS (result_c) = 1;
12509 /* Combine the assignments into a compound expr. */
12510 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12511 result_s, result_c));
12516 return result;
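/* An illustrative sketch of the two uses of do_mpfr_sincos: for a
   constant X, folding __builtin_cexpi (x) calls
   do_mpfr_sincos (x, NULL_TREE, NULL_TREE) and receives
   cos (x) + sin (x) * I as a COMPLEX_CST, while folding
   __builtin_sincos (x, &s, &c) passes the two pointer arguments and
   receives a compound expression performing both stores.  */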
12519 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12520 two-argument mpfr order N Bessel function FUNC on them and return
12521 the resulting value as a tree with type TYPE. The mpfr precision
12522 is set to the precision of TYPE. We assume that function FUNC
12523 returns zero if the result could be calculated exactly within the
12524 requested precision. */
12525 static tree
12526 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12527 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12528 const REAL_VALUE_TYPE *min, bool inclusive)
12530 tree result = NULL_TREE;
12532 STRIP_NOPS (arg1);
12533 STRIP_NOPS (arg2);
12535 /* To proceed, MPFR must exactly represent the target floating point
12536 format, which only happens when the target base equals two. */
12537 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12538 && tree_fits_shwi_p (arg1)
12539 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12541 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12542 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12544 if (n == (long)n
12545 && real_isfinite (ra)
12546 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12548 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12549 const int prec = fmt->p;
12550 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12551 int inexact;
12552 mpfr_t m;
12554 mpfr_init2 (m, prec);
12555 mpfr_from_real (m, ra, GMP_RNDN);
12556 mpfr_clear_flags ();
12557 inexact = func (m, n, m, rnd);
12558 result = do_mpfr_ckconv (m, type, inexact);
12559 mpfr_clear (m);
12563 return result;
12566 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12567 the pointer *(ARG_QUO) and return the result. The type is taken
12568 from the type of ARG0 and is used for setting the precision of the
12569 calculation and results. */
12571 static tree
12572 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12574 tree const type = TREE_TYPE (arg0);
12575 tree result = NULL_TREE;
12577 STRIP_NOPS (arg0);
12578 STRIP_NOPS (arg1);
12580 /* To proceed, MPFR must exactly represent the target floating point
12581 format, which only happens when the target base equals two. */
12582 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12583 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12584 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12586 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12587 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12589 if (real_isfinite (ra0) && real_isfinite (ra1))
12591 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12592 const int prec = fmt->p;
12593 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12594 tree result_rem;
12595 long integer_quo;
12596 mpfr_t m0, m1;
12598 mpfr_inits2 (prec, m0, m1, NULL);
12599 mpfr_from_real (m0, ra0, GMP_RNDN);
12600 mpfr_from_real (m1, ra1, GMP_RNDN);
12601 mpfr_clear_flags ();
12602 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12603 /* Remquo is independent of the rounding mode, so pass
12604 inexact=0 to do_mpfr_ckconv(). */
12605 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12606 mpfr_clears (m0, m1, NULL);
12607 if (result_rem)
12609 /* MPFR calculates quo in the host's long, so it may
12610 return more bits in quo than the target int can hold
12611 if sizeof (host long) > sizeof (target int). This can
12612 happen even for native compilers in LP64 mode. In
12613 these cases, reduce the quo value modulo the largest
12614 number that the target int can hold while leaving one
12615 bit for the sign. */
12616 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12617 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12619 /* Dereference the quo pointer argument. */
12620 arg_quo = build_fold_indirect_ref (arg_quo);
12621 /* Proceed iff a valid pointer type was passed in. */
12622 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12624 /* Set the value. */
12625 tree result_quo
12626 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12627 build_int_cst (TREE_TYPE (arg_quo),
12628 integer_quo));
12629 TREE_SIDE_EFFECTS (result_quo) = 1;
12630 /* Combine the quo assignment with the rem. */
12631 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12632 result_quo, result_rem));
12637 return result;
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointed to by
   ARG_SG will be dereferenced and set to the appropriate signgam
   value, -1 or 1.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify that ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}

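/* A minimal user-level sketch (illustrative only, guarded out of the
   build; assumes a libc providing POSIX lgamma_r): with a constant
   argument the call below can fold to (sg = 1, log (sqrt (pi))),
   i.e. roughly 0.5723649, since Gamma(0.5) = sqrt(pi) is positive.  */
#if 0
#include <math.h>

static double
lgamma_r_fold_demo (void)
{
  int sg;
  double d = lgamma_r (0.5, &sg);  /* d ~ 0.5723649, sg = 1 */
  return sg > 0 ? d : -d;
}
#endif
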
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}

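/* A minimal sketch (illustrative only, guarded out of the build) of
   the intended calling pattern: the complex-math folders hand this
   helper an MPC evaluator with the matching one-argument signature,
   e.g. mpc_cos when folding ccos.  ARG and TYPE are placeholders for
   the constant argument and result type of the call being folded.  */
#if 0
tree folded_ccos = do_mpc_arg1 (arg, type, mpc_cos);
#endif
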
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}

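/* A minimal sketch (illustrative only, guarded out of the build):
   a cpow-style fold would pass both constant operands together with
   a DO_NONFINITE choice; mpc_pow has the two-operand signature this
   helper expects.  ARG0, ARG1, TYPE and the zero flag are
   placeholders, not a claim about how GCC's cpow folder sets them.  */
#if 0
tree folded_cpow = do_mpc_arg2 (arg0, arg1, type,
                                /*do_nonfinite=*/ 0, mpc_pow);
#endif
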
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
            ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from the original call to
                 the expansion of the builtin.  Otherwise things like
                 maybe_emit_chk_warning, which operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}

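/* A minimal sketch (illustrative only, guarded out of the build) of
   the intended use: a gimple-level caller holding a gcall asks for a
   folded replacement and substitutes it only if one is produced.
   STMT is a placeholder for a statement in hand.  */
#if 0
tree folded = fold_call_stmt (stmt, gimple_call_lhs (stmt) == NULL_TREE);
if (folded != NULL_TREE)
  {
    /* ...substitute FOLDED for the call's value...  */
  }
#endif
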
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}

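/* A minimal user-level sketch (illustrative only, guarded out of the
   build) of what reaches this hook: redeclaring a builtin with an asm
   label.  The libfunc updates above then make block-move expansions
   of __builtin_memcpy call the renamed routine too.  "my_memcpy" is a
   hypothetical name.  */
#if 0
extern void *memcpy (void *, const void *, __SIZE_TYPE__)
  __asm__ ("my_memcpy");
#endif
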
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}

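/* A minimal user-level sketch (illustrative only, guarded out of the
   build): both builtin calls below are "simple" in the sense above;
   the first folds to a constant and the second expands to its first
   argument, so neither contributes real code.  */
#if 0
static int
simple_builtin_demo (int x)
{
  if (__builtin_constant_p (x))
    return x;
  return (int) __builtin_expect (x, 0);
}
#endif
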
/* Return true if DECL is a builtin that is not expensive, i.e. one that
   is most probably expanded inline into reasonably simple code.  This is
   a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}

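/* A minimal user-level sketch (illustrative only, guarded out of the
   build): __builtin_popcount is on the list above, so it is costed as
   inexpensive; it expands to a popcount instruction or a small libgcc
   helper rather than a full library call.  */
#if 0
static int
popcount_demo (unsigned v)
{
  return __builtin_popcount (v);  /* e.g. v = 0xF0F0 gives 8 */
}
#endif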