/* 2014-08-15  Richard Biener  <rguenther@suse.de>
   Source: official-gcc.git / gcc / builtins.c
   blob 846856897390b2d945510c526b48465ce01129f5  */
1 /* Expand builtin functions.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "calls.h"
30 #include "varasm.h"
31 #include "tree-object-size.h"
32 #include "realmpfr.h"
33 #include "basic-block.h"
34 #include "tree-ssa-alias.h"
35 #include "internal-fn.h"
36 #include "gimple-expr.h"
37 #include "is-a.h"
38 #include "gimple.h"
39 #include "flags.h"
40 #include "regs.h"
41 #include "hard-reg-set.h"
42 #include "except.h"
43 #include "function.h"
44 #include "insn-config.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "recog.h"
49 #include "output.h"
50 #include "typeclass.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "target.h"
54 #include "langhooks.h"
55 #include "tree-ssanames.h"
56 #include "tree-dfa.h"
57 #include "value-prof.h"
58 #include "diagnostic-core.h"
59 #include "builtins.h"
60 #include "ubsan.h"
61 #include "cilk.h"
/* Forward declarations and file-scope state used by the builtin
   expanders and folders defined later in this file.  */

static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

/* Per-target builtin state; a pointer indirection is used only when the
   compiler supports switching targets at run time.  */
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Stringify every DEF_BUILTIN entry from builtins.def to build the
   table of builtin names, indexed by built_in_function code.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

/* Expanders: turn builtin calls into RTL at expand time.  */
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
/* Folders: simplify builtin calls at the tree level.  */
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
                                        enum tree_code);
/* Arity-dispatched folding entry points.  */
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);

/* Object-size (_FORTIFY_SOURCE-style *_chk) helpers.  */
static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
                                  enum built_in_function);

/* Target-charset encodings of characters and small format fragments,
   presumably initialized elsewhere and used when folding printf-family
   calls (e.g. the target's '%', 'c', 's', newline) — see their users.  */
static unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
char target_percent_s[3];
static char target_percent_s_newline[4];
/* MPFR-backed constant folding of math builtins.  */
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
229 /* Return true if NAME starts with __builtin_ or __sync_. */
231 static bool
232 is_builtin_name (const char *name)
234 if (strncmp (name, "__builtin_", 10) == 0)
235 return true;
236 if (strncmp (name, "__sync_", 7) == 0)
237 return true;
238 if (strncmp (name, "__atomic_", 9) == 0)
239 return true;
240 if (flag_cilkplus
241 && (!strcmp (name, "__cilkrts_detach")
242 || !strcmp (name, "__cilkrts_pop_frame")))
243 return true;
244 return false;
248 /* Return true if DECL is a function symbol representing a built-in. */
250 bool
251 is_builtin_fn (tree decl)
253 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
256 /* Return true if NODE should be considered for inline expansion regardless
257 of the optimization level. This means whenever a function is invoked with
258 its "internal" name, which normally contains the prefix "__builtin". */
260 static bool
261 called_as_built_in (tree node)
263 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
264 we want the name used to call the function, not the name it
265 will have. */
266 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
267 return is_builtin_name (name);
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;   /* Conservative default.  */
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    /* Labels contribute no alignment knowledge; keep the defaults.  */
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      /* A pointer masked with a constant (addr & -C) guarantees the
         alignment given by the lowest set bit of the mask.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
                   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
          align *= BITS_PER_UNIT;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              /* Only the lowest set bit of the step is guaranteed.  */
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  /* Reduce the known bit position modulo the alignment.  */
  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
418 /* For a memory reference expression EXP compute values M and N such that M
419 divides (&EXP - N) and such that N < M. If these numbers can be determined,
420 store M in alignp and N in *BITPOSP and return true. Otherwise return false
421 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
423 bool
424 get_object_alignment_1 (tree exp, unsigned int *alignp,
425 unsigned HOST_WIDE_INT *bitposp)
427 return get_object_alignment_2 (exp, alignp, bitposp, false);
430 /* Return the alignment in bits of EXP, an object. */
432 unsigned int
433 get_object_alignment (tree exp)
435 unsigned HOST_WIDE_INT bitpos = 0;
436 unsigned int align;
438 get_object_alignment_1 (exp, &align, &bitpos);
440 /* align and bitpos now specify known low bits of the pointer.
441 ptr & (align - 1) == bitpos. */
443 if (bitpos != 0)
444 align = (bitpos & -bitpos);
445 return align;
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    /* &object: defer to the object's alignment, noting the address is
       being taken (ADDR_P true).  */
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      /* Use alignment info recorded on the SSA name, if any.  */
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      /* A constant address: its low bits are known exactly.  */
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  /* Anything else: only the trivial single-byte alignment is known.  */
  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
497 /* Return the alignment in bits of EXP, a pointer valued expression.
498 The alignment returned is, by default, the alignment of the thing that
499 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
501 Otherwise, look at the expression to see if we can do better, i.e., if the
502 expression is actually pointing at an object whose alignment is tighter. */
504 unsigned int
505 get_pointer_alignment (tree exp)
507 unsigned HOST_WIDE_INT bitpos = 0;
508 unsigned int align;
510 get_pointer_alignment_1 (exp, &align, &bitpos);
512 /* align and bitpos now specify known low bits of the pointer.
513 ptr & (align - 1) == bitpos. */
515 if (bitpos != 0)
516 align = (bitpos & -bitpos);
518 return align;
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* A conditional whose arms have equal known length folds to that
     length (provided evaluating it loses no side effects).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  /* For (e1, e2) only the second operand determines the length.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
          && !TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
628 /* Return a char pointer for a C string if it is a string constant
629 or sum of string constant and integer constant. */
631 const char *
632 c_getstr (tree src)
634 tree offset_node;
636 src = string_constant (src, &offset_node);
637 if (src == 0)
638 return 0;
640 if (offset_node == 0)
641 return TREE_STRING_POINTER (src);
642 else if (!tree_fits_uhwi_p (offset_node)
643 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
644 return 0;
646 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  /* Number of host words needed to hold the mode's precision.  */
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Map host byte index I to the target bit position J, honoring
         the target's word and byte endianness.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      /* Once a NUL byte is seen, CH stays 0 so the remainder of the
         value is zero-filled.  */
      if (ch)
        ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
687 /* Cast a target constant CST to target CHAR and if that value fits into
688 host char type, return zero and put that value into variable pointed to by
689 P. */
691 static int
692 target_char_cast (tree cst, char *p)
694 unsigned HOST_WIDE_INT val, hostval;
696 if (TREE_CODE (cst) != INTEGER_CST
697 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
698 return 1;
700 /* Do not care if it fits or not right here. */
701 val = TREE_INT_CST_LOW (cst);
703 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
704 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
706 hostval = val;
707 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
708 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
710 if (val != hostval)
711 return 1;
713 *p = hostval;
714 return 0;
717 /* Similar to save_expr, but assumes that arbitrary code is not executed
718 in between the multiple evaluations. In particular, we assume that a
719 non-addressable local variable will not be modified. */
721 static tree
722 builtin_save_expr (tree exp)
724 if (TREE_CODE (exp) == SSA_NAME
725 || (TREE_ADDRESSABLE (exp) == 0
726 && (TREE_CODE (exp) == PARM_DECL
727 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
728 return exp;
730 return save_expr (exp);
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

  /* Start with the frame the target says holds the initial frame
     address, when it says anything at all.  */
#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  -1 means not yet allocated;
   it is created lazily by expand_builtin_setjmp_setup.  */
static alias_set_type setjmp_alias_set = -1;
820 /* Construct the leading half of a __builtin_setjmp call. Control will
821 return to RECEIVER_LABEL. This is also called directly by the SJLJ
822 exception handling code. */
824 void
825 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
827 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
828 rtx stack_save;
829 rtx mem;
831 if (setjmp_alias_set == -1)
832 setjmp_alias_set = new_alias_set ();
834 buf_addr = convert_memory_address (Pmode, buf_addr);
836 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
838 /* We store the frame pointer and the address of receiver_label in
839 the buffer and use the rest of it for the stack save area, which
840 is machine-dependent. */
842 mem = gen_rtx_MEM (Pmode, buf_addr);
843 set_mem_alias_set (mem, setjmp_alias_set);
844 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
846 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
847 GET_MODE_SIZE (Pmode))),
848 set_mem_alias_set (mem, setjmp_alias_set);
850 emit_move_insn (validize_mem (mem),
851 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
853 stack_save = gen_rtx_MEM (sa_mode,
854 plus_constant (Pmode, buf_addr,
855 2 * GET_MODE_SIZE (Pmode)));
856 set_mem_alias_set (stack_save, setjmp_alias_set);
857 emit_stack_save (SAVE_NONLOCAL, &stack_save);
859 /* If there is further processing to do, do it. */
860 #ifdef HAVE_builtin_setjmp_setup
861 if (HAVE_builtin_setjmp_setup)
862 emit_insn (gen_builtin_setjmp_setup (buf_addr));
863 #endif
865 /* We have a nonlocal label. */
866 cfun->has_nonlocal_label = 1;
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Let the target emit a receiver pattern if it has one; otherwise fall
     back to the nonlocal-goto receiver pattern, otherwise emit nothing.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* Slots in the setjmp buffer: 0 = frame pointer, 1 = resume label,
	 2 = saved stack pointer.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      /* The jump must come from the insns emitted above, so we must
	 find it before reaching LAST (the insn stream position on entry).  */
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
1049 static inline bool
1050 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1052 return (iter->i < iter->n);
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      /* Each vararg is a tree_code specifier; it is promoted to int,
	 so read it back as int and cast.  */
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
/* Expand a call EXP to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  /* Expect exactly two pointer arguments (label, save area).  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  /* Save area layout: word 0 = frame pointer, word 1 = stack pointer.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1188 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1189 (not all will be used on all machines) that was passed to __builtin_setjmp.
1190 It updates the stack pointer in that block to correspond to the current
1191 stack pointer. */
1193 static void
1194 expand_builtin_update_setjmp_buf (rtx buf_addr)
1196 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1197 rtx stack_save
1198 = gen_rtx_MEM (sa_mode,
1199 memory_address
1200 (sa_mode,
1201 plus_constant (Pmode, buf_addr,
1202 2 * GET_MODE_SIZE (Pmode))));
1204 emit_stack_save (SAVE_NONLOCAL, &stack_save);
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  /* First argument must be a pointer; further arguments are unchecked
     here (the "0" specifier means ellipsis).  */
  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  /* Expand the original expression (not the stripped one) to get the
     actual address, then wrap it in a BLKmode MEM.  */
  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      /* Rebuild the MEM_REF over the base object with an unbounded
	 (NULL upper bound) byte range.  */
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  /* Alias set 0: may alias anything.  */
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

/* Per-target tables (indexed by hard register number) of the mode each
   argument / result register is saved in, VOIDmode when unused; they are
   filled in lazily by apply_args_size and apply_result_size.  */
#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  /* Cached result: computed once per run, reused afterwards.  */
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    /* Round SIZE up to this register's alignment, then account
	       for the register and record its mode.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  /* Cached result: computed once per run, reused afterwards.  */
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    /* Round SIZE up to this register's alignment, then account
	       for the register and record its mode.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Same alignment/offset walk as apply_result_size, so each
	   register pairs with its slot in the result block.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	/* Saving moves register -> memory; restoring moves memory -> register.  */
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1463 /* Save the state required to perform an untyped call with the same
1464 arguments as were passed to the current function. */
1466 static rtx
1467 expand_builtin_apply_args_1 (void)
1469 rtx registers, tem;
1470 int size, align, regno;
1471 enum machine_mode mode;
1472 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1474 /* Create a block where the arg-pointer, structure value address,
1475 and argument registers can be saved. */
1476 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1478 /* Walk past the arg-pointer and structure value address. */
1479 size = GET_MODE_SIZE (Pmode);
1480 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1481 size += GET_MODE_SIZE (Pmode);
1483 /* Save each register used in calling a function to the block. */
1484 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1485 if ((mode = apply_args_mode[regno]) != VOIDmode)
1487 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1488 if (size % align != 0)
1489 size = CEIL (size, align) * align;
1491 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1493 emit_move_insn (adjust_address (registers, mode, size), tem);
1494 size += GET_MODE_SIZE (mode);
1497 /* Save the arg pointer to the block. */
1498 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1499 #ifdef STACK_GROWS_DOWNWARD
1500 /* We need the pointer as the caller actually passed them to us, not
1501 as we might have pretended they were passed. Make sure it's a valid
1502 operand, as emit_move_insn isn't expected to handle a PLUS. */
1504 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1505 NULL_RTX);
1506 #endif
1507 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1509 size = GET_MODE_SIZE (Pmode);
1511 /* Save the structure value address unless this is passed as an
1512 "invisible" first argument. */
1513 if (struct_incoming_value)
1515 emit_move_insn (adjust_address (registers, Pmode, size),
1516 copy_to_reg (struct_incoming_value));
1517 size += GET_MODE_SIZE (Pmode);
1520 /* Return the address of the block. */
1521 return copy_addr_to_reg (XEXP (registers, 0));
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Build the register-saving code in a detached sequence.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  /* On upward-growing stacks the saved arg pointer is past the block;
     step back by the argument size.  */
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (Pmode, dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
    if (HAVE_call_value)
      {
	rtx valreg = 0;

	/* Locate the unique return register.  It is not possible to
	   express a call that sets more than one return register using
	   call_value; use untyped_call for that.  In fact, untyped_call
	   only needs to save the return registers in the given block.  */
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	  if ((mode = apply_result_mode[regno]) != VOIDmode)
	    {
	      gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	      valreg = gen_rtx_REG (mode, regno);
	    }

	emit_call_insn (GEN_CALL_VALUE (valreg,
					gen_rtx_MEM (FUNCTION_MODE, function),
					const0_rtx, NULL_RTX, const0_rtx));

	emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
      }
    else
#endif
      gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Make sure apply_result_mode is initialized before using it.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate USE insns for every restored register into
	   CALL_FUSAGE so they survive until the return.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1786 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1788 static enum type_class
1789 type_to_class (tree type)
1791 switch (TREE_CODE (type))
1793 case VOID_TYPE: return void_type_class;
1794 case INTEGER_TYPE: return integer_type_class;
1795 case ENUMERAL_TYPE: return enumeral_type_class;
1796 case BOOLEAN_TYPE: return boolean_type_class;
1797 case POINTER_TYPE: return pointer_type_class;
1798 case REFERENCE_TYPE: return reference_type_class;
1799 case OFFSET_TYPE: return offset_type_class;
1800 case REAL_TYPE: return real_type_class;
1801 case COMPLEX_TYPE: return complex_type_class;
1802 case FUNCTION_TYPE: return function_type_class;
1803 case METHOD_TYPE: return method_type_class;
1804 case RECORD_TYPE: return record_type_class;
1805 case UNION_TYPE:
1806 case QUAL_UNION_TYPE: return union_type_class;
1807 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1808 ? string_type_class : array_type_class);
1809 case LANG_TYPE: return lang_type_class;
1810 default: return no_type_class;
1814 /* Expand a call EXP to __builtin_classify_type. */
1816 static rtx
1817 expand_builtin_classify_type (tree exp)
1819 if (call_expr_nargs (exp))
1820 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1821 return GEN_INT (no_type_class);
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  It matches any of
   the three variants and sets the local variables fcode/fcodef/fcodel
   of the enclosing switch to the double/float/long-double codes.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
/* Return mathematic function equivalent to FN but operating directly on TYPE,
   if available.  If IMPLICIT is true use the implicit builtin declaration,
   otherwise use the explicit declaration.  If we can't do the conversion,
   return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  /* Map FN (any of the double/float/long-double variants) onto the full
     triple of function codes via the CASE_MATHFN macros above.  */
  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_ICEIL)
      CASE_MATHFN (BUILT_IN_IFLOOR)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_IRINT)
      CASE_MATHFN (BUILT_IN_IROUND)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;
    }

  /* Pick the variant matching TYPE; anything other than the three
     standard floating types cannot be converted.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}
1957 /* Like mathfn_built_in_1(), but always use the implicit array. */
1959 tree
1960 mathfn_built_in (tree type, enum built_in_function fn)
1962 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A self-compare with EQ jumps
     to LAB (skipping the errno store) when the result is not NaN.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
                           NULL_RTX, NULL_RTX, lab,
                           /* The jump is very likely.  */
                           REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      /* Fall back to a plain word-mode store through the symbol "errno".  */
      rtx errno_rtx
          = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
                      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  bool try_widening = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the optab and decide whether the call can set errno.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt only sets errno for a negative argument, so a provably
         non-negative argument needs no errno handling.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      try_widening = true;
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
        break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* errno handling only matters when -fmath-errno is in effect and the
     mode actually has NaNs to detect.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available, but try
     to widen the mode for specific operations.  */
  if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
       || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
        {
          if (errno_set)
            expand_errno_check (exp, result);

          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2134 /* Expand a call to the builtin binary math functions (pow and atan2).
2135 Return NULL_RTX if a normal call should be emitted rather than expanding the
2136 function in-line. EXP is the expression that is a call to the builtin
2137 function; if convenient, the result should be placed in TARGET.
2138 SUBTARGET may be used as the target for computing one of EXP's
2139 operands. */
2141 static rtx
2142 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2144 optab builtin_optab;
2145 rtx op0, op1, insns, result;
2146 int op1_type = REAL_TYPE;
2147 tree fndecl = get_callee_fndecl (exp);
2148 tree arg0, arg1;
2149 enum machine_mode mode;
2150 bool errno_set = true;
2152 switch (DECL_FUNCTION_CODE (fndecl))
2154 CASE_FLT_FN (BUILT_IN_SCALBN):
2155 CASE_FLT_FN (BUILT_IN_SCALBLN):
2156 CASE_FLT_FN (BUILT_IN_LDEXP):
2157 op1_type = INTEGER_TYPE;
2158 default:
2159 break;
2162 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2163 return NULL_RTX;
2165 arg0 = CALL_EXPR_ARG (exp, 0);
2166 arg1 = CALL_EXPR_ARG (exp, 1);
2168 switch (DECL_FUNCTION_CODE (fndecl))
2170 CASE_FLT_FN (BUILT_IN_POW):
2171 builtin_optab = pow_optab; break;
2172 CASE_FLT_FN (BUILT_IN_ATAN2):
2173 builtin_optab = atan2_optab; break;
2174 CASE_FLT_FN (BUILT_IN_SCALB):
2175 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2176 return 0;
2177 builtin_optab = scalb_optab; break;
2178 CASE_FLT_FN (BUILT_IN_SCALBN):
2179 CASE_FLT_FN (BUILT_IN_SCALBLN):
2180 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2181 return 0;
2182 /* Fall through... */
2183 CASE_FLT_FN (BUILT_IN_LDEXP):
2184 builtin_optab = ldexp_optab; break;
2185 CASE_FLT_FN (BUILT_IN_FMOD):
2186 builtin_optab = fmod_optab; break;
2187 CASE_FLT_FN (BUILT_IN_REMAINDER):
2188 CASE_FLT_FN (BUILT_IN_DREM):
2189 builtin_optab = remainder_optab; break;
2190 default:
2191 gcc_unreachable ();
2194 /* Make a suitable register to place result in. */
2195 mode = TYPE_MODE (TREE_TYPE (exp));
2197 /* Before working hard, check whether the instruction is available. */
2198 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2199 return NULL_RTX;
2201 result = gen_reg_rtx (mode);
2203 if (! flag_errno_math || ! HONOR_NANS (mode))
2204 errno_set = false;
2206 if (errno_set && optimize_insn_for_size_p ())
2207 return 0;
2209 /* Always stabilize the argument list. */
2210 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2211 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2213 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2214 op1 = expand_normal (arg1);
2216 start_sequence ();
2218 /* Compute into RESULT.
2219 Set RESULT to wherever the result comes back. */
2220 result = expand_binop (mode, builtin_optab, op0, op1,
2221 result, 0, OPTAB_DIRECT);
2223 /* If we were unable to expand via the builtin, stop the sequence
2224 (without outputting the insns) and call to the library function
2225 with the stabilized argument list. */
2226 if (result == 0)
2228 end_sequence ();
2229 return expand_call (exp, target, target == const0_rtx);
2232 if (errno_set)
2233 expand_errno_check (exp, result);
2235 /* Output the entire sequence. */
2236 insns = get_insns ();
2237 end_sequence ();
2238 emit_insn (insns);
2240 return result;
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, insns, result;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  enum machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list, so a possible later re-expansion
     does not duplicate side-effects.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
                              result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Prefer the combined sincos optab for both sin and cos.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
        builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
        builtin_optab = cos_optab; break;
      default:
        gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
        {
          int ok;

          /* sincos produces two values; request only the one we need by
             passing 0 for the unused output.  */
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_SIN):
              ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
              break;
            CASE_FLT_FN (BUILT_IN_COS):
              ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
              break;
            default:
              gcc_unreachable ();
            }
          gcc_assert (ok);
        }
      else
        result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  enum machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* unknown_optab (left when no optab case matched) is falsy here.  */
  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx last = get_last_insn ();
      /* Remember the unmodified argument so we can restore it if the
         insn emission fails below.  */
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
        op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
          && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
        return ops[0].value;

      /* Emission failed: discard any partially-emitted insns and undo
         the argument rewrite before falling back to a normal call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  /* sincos (x, sin*, cos*): one real value and two output pointers.  */
  if (!validate_arglist (exp, REAL_TYPE,
                         POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs through the sin/cos pointers so the stores get the
     right alias set.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
                                        sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
                                        cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant matching the precision of the cexpi
         builtin being expanded.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
        gcc_unreachable ();

      /* Stack temporaries receive the sin and cos results; their
         addresses are passed to the sincos libcall.  */
      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
                                      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
        gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
         friendliest fallback if the user calls __builtin_cexpi
         without full target C99 function support.  */
      if (fn == NULL_TREE)
        {
          tree fntype;
          const char *name = NULL;

          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
            name = "cexpf";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
            name = "cexp";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
            name = "cexpl";

          fntype = build_function_type_list (ctype, ctype, NULL_TREE);
          fn = build_fn_decl (name, fntype);
        }

      /* cexpi (x) == cexp (0 + x*i).  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
                              build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
                              make_tree (TREE_TYPE (arg), op2),
                              make_tree (TREE_TYPE (arg), op1)),
                      target, VOIDmode, EXPAND_NORMAL);
}
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  /* The callee is referenced through a pointer to its function type.  */
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Select the library name matching the argument precision.  */
      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_ICEIL:
        case BUILT_IN_LCEIL:
        case BUILT_IN_LLCEIL:
          name = "ceil";
          break;
        case BUILT_IN_ICEILF:
        case BUILT_IN_LCEILF:
        case BUILT_IN_LLCEILF:
          name = "ceilf";
          break;
        case BUILT_IN_ICEILL:
        case BUILT_IN_LCEILL:
        case BUILT_IN_LLCEILL:
          name = "ceill";
          break;
        case BUILT_IN_IFLOOR:
        case BUILT_IN_LFLOOR:
        case BUILT_IN_LLFLOOR:
          name = "floor";
          break;
        case BUILT_IN_IFLOORF:
        case BUILT_IN_LFLOORF:
        case BUILT_IN_LLFLOORF:
          name = "floorf";
          break;
        case BUILT_IN_IFLOORL:
        case BUILT_IN_LFLOORL:
        case BUILT_IN_LLFLOORL:
          name = "floorl";
          break;
        default:
          gcc_unreachable ();
        }

      fntype = build_function_type_list (TREE_TYPE (arg),
                                         TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* With -fno-math-errno we need not worry about EDOM at all, so the
     optab expansion is safe.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
         targets, (int) round (x) should never be transformed into
         BUILT_IN_IROUND and if __builtin_iround is called directly, emit
         a call to lround in the hope that the target provides at least some
         C99 functions.  This should result in the best user experience for
         not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
                                                fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
                                   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      /* The lround/lrint result is a long; convert to the mode the
         caller's builtin returns.  */
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
2931 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2932 a normal call should be emitted rather than expanding the function
2933 in-line. EXP is the expression that is a call to the builtin
2934 function; if convenient, the result should be placed in TARGET. */
2936 static rtx
2937 expand_builtin_powi (tree exp, rtx target)
2939 tree arg0, arg1;
2940 rtx op0, op1;
2941 enum machine_mode mode;
2942 enum machine_mode mode2;
2944 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2945 return NULL_RTX;
2947 arg0 = CALL_EXPR_ARG (exp, 0);
2948 arg1 = CALL_EXPR_ARG (exp, 1);
2949 mode = TYPE_MODE (TREE_TYPE (exp));
2951 /* Emit a libcall to libgcc. */
2953 /* Mode of the 2nd argument must match that of an int. */
2954 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2956 if (target == NULL_RTX)
2957 target = gen_reg_rtx (mode);
2959 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2960 if (GET_MODE (op0) != mode)
2961 op0 = convert_to_mode (mode, op0, 0);
2962 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2963 if (GET_MODE (op1) != mode2)
2964 op1 = convert_to_mode (mode2, op1, 0);
2966 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2967 target, LCT_CONST, mode, 2,
2968 op0, mode, op1, mode2);
2970 return target;
2973 /* Expand expression EXP which is a call to the strlen builtin. Return
2974 NULL_RTX if we failed the caller should emit a normal call, otherwise
2975 try to get the result in TARGET, if convenient. */
2977 static rtx
2978 expand_builtin_strlen (tree exp, rtx target,
2979 enum machine_mode target_mode)
2981 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2982 return NULL_RTX;
2983 else
2985 struct expand_operand ops[4];
2986 rtx pat;
2987 tree len;
2988 tree src = CALL_EXPR_ARG (exp, 0);
2989 rtx src_reg, before_strlen;
2990 enum machine_mode insn_mode = target_mode;
2991 enum insn_code icode = CODE_FOR_nothing;
2992 unsigned int align;
2994 /* If the length can be computed at compile-time, return it. */
2995 len = c_strlen (src, 0);
2996 if (len)
2997 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2999 /* If the length can be computed at compile-time and is constant
3000 integer, but there are side-effects in src, evaluate
3001 src for side-effects, then return len.
3002 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3003 can be optimized into: i++; x = 3; */
3004 len = c_strlen (src, 1);
3005 if (len && TREE_CODE (len) == INTEGER_CST)
3007 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3008 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3011 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3013 /* If SRC is not a pointer type, don't do this operation inline. */
3014 if (align == 0)
3015 return NULL_RTX;
3017 /* Bail out if we can't compute strlen in the right mode. */
3018 while (insn_mode != VOIDmode)
3020 icode = optab_handler (strlen_optab, insn_mode);
3021 if (icode != CODE_FOR_nothing)
3022 break;
3024 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3026 if (insn_mode == VOIDmode)
3027 return NULL_RTX;
3029 /* Make a place to hold the source address. We will not expand
3030 the actual source until we are sure that the expansion will
3031 not fail -- there are trees that cannot be expanded twice. */
3032 src_reg = gen_reg_rtx (Pmode);
3034 /* Mark the beginning of the strlen sequence so we can emit the
3035 source operand later. */
3036 before_strlen = get_last_insn ();
3038 create_output_operand (&ops[0], target, insn_mode);
3039 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3040 create_integer_operand (&ops[2], 0);
3041 create_integer_operand (&ops[3], align);
3042 if (!maybe_expand_insn (icode, 4, ops))
3043 return NULL_RTX;
3045 /* Now that we are assured of success, expand the source. */
3046 start_sequence ();
3047 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3048 if (pat != src_reg)
3050 #ifdef POINTERS_EXTEND_UNSIGNED
3051 if (GET_MODE (pat) != Pmode)
3052 pat = convert_to_mode (Pmode, pat,
3053 POINTERS_EXTEND_UNSIGNED);
3054 #endif
3055 emit_move_insn (src_reg, pat);
3057 pat = get_insns ();
3058 end_sequence ();
3060 if (before_strlen)
3061 emit_insn_after (pat, before_strlen);
3062 else
3063 emit_insn_before (pat, get_insns ());
3065 /* Return the value in the proper mode for this function. */
3066 if (GET_MODE (ops[0].value) == target_mode)
3067 target = ops[0].value;
3068 else if (target != 0)
3069 convert_move (target, ops[0].value, 0);
3070 else
3071 target = convert_to_mode (target_mode, ops[0].value, 0);
3073 return target;
3077 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3078 bytes from constant string DATA + OFFSET and return it as target
3079 constant. */
3081 static rtx
3082 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3083 enum machine_mode mode)
3085 const char *str = (const char *) data;
3087 gcc_assert (offset >= 0
3088 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3089 <= strlen (str) + 1));
3091 return c_readstr (str + offset, mode);
3094 /* LEN specify length of the block of memcpy/memset operation.
3095 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3096 In some cases we can make very likely guess on max size, then we
3097 set it into PROBABLE_MAX_SIZE. */
3099 static void
3100 determine_block_size (tree len, rtx len_rtx,
3101 unsigned HOST_WIDE_INT *min_size,
3102 unsigned HOST_WIDE_INT *max_size,
3103 unsigned HOST_WIDE_INT *probable_max_size)
3105 if (CONST_INT_P (len_rtx))
3107 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3108 return;
3110 else
3112 wide_int min, max;
3113 enum value_range_type range_type = VR_UNDEFINED;
3115 /* Determine bounds from the type. */
3116 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3117 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3118 else
3119 *min_size = 0;
3120 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3121 *probable_max_size = *max_size
3122 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3123 else
3124 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3126 if (TREE_CODE (len) == SSA_NAME)
3127 range_type = get_range_info (len, &min, &max);
3128 if (range_type == VR_RANGE)
3130 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3131 *min_size = min.to_uhwi ();
3132 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3133 *probable_max_size = *max_size = max.to_uhwi ();
3135 else if (range_type == VR_ANTI_RANGE)
3137 /* Anti range 0...N lets us to determine minimal size to N+1. */
3138 if (min == 0)
3140 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3141 *min_size = max.to_uhwi () + 1;
3143 /* Code like
3145 int n;
3146 if (n < 100)
3147 memcpy (a, b, n)
3149 Produce anti range allowing negative values of N. We still
3150 can use the information and make a guess that N is not negative.
3152 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3153 *probable_max_size = min.to_uhwi () - 1;
3156 gcc_checking_assert (*max_size <=
3157 (unsigned HOST_WIDE_INT)
3158 GET_MODE_MASK (GET_MODE (len_rtx)));
3161 /* Expand a call EXP to the memcpy builtin.
3162 Return NULL_RTX if we failed, the caller should emit a normal call,
3163 otherwise try to get the result in TARGET, if convenient (and in
3164 mode MODE if that's convenient). */
3166 static rtx
3167 expand_builtin_memcpy (tree exp, rtx target)
3169 if (!validate_arglist (exp,
3170 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3171 return NULL_RTX;
3172 else
3174 tree dest = CALL_EXPR_ARG (exp, 0);
3175 tree src = CALL_EXPR_ARG (exp, 1);
3176 tree len = CALL_EXPR_ARG (exp, 2);
3177 const char *src_str;
3178 unsigned int src_align = get_pointer_alignment (src);
3179 unsigned int dest_align = get_pointer_alignment (dest);
3180 rtx dest_mem, src_mem, dest_addr, len_rtx;
3181 HOST_WIDE_INT expected_size = -1;
3182 unsigned int expected_align = 0;
3183 unsigned HOST_WIDE_INT min_size;
3184 unsigned HOST_WIDE_INT max_size;
3185 unsigned HOST_WIDE_INT probable_max_size;
3187 /* If DEST is not a pointer type, call the normal function. */
3188 if (dest_align == 0)
3189 return NULL_RTX;
3191 /* If either SRC is not a pointer type, don't do this
3192 operation in-line. */
3193 if (src_align == 0)
3194 return NULL_RTX;
3196 if (currently_expanding_gimple_stmt)
3197 stringop_block_profile (currently_expanding_gimple_stmt,
3198 &expected_align, &expected_size);
3200 if (expected_align < dest_align)
3201 expected_align = dest_align;
3202 dest_mem = get_memory_rtx (dest, len);
3203 set_mem_align (dest_mem, dest_align);
3204 len_rtx = expand_normal (len);
3205 determine_block_size (len, len_rtx, &min_size, &max_size,
3206 &probable_max_size);
3207 src_str = c_getstr (src);
3209 /* If SRC is a string constant and block move would be done
3210 by pieces, we can avoid loading the string from memory
3211 and only stored the computed constants. */
3212 if (src_str
3213 && CONST_INT_P (len_rtx)
3214 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3215 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3216 CONST_CAST (char *, src_str),
3217 dest_align, false))
3219 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3220 builtin_memcpy_read_str,
3221 CONST_CAST (char *, src_str),
3222 dest_align, false, 0);
3223 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3224 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3225 return dest_mem;
3228 src_mem = get_memory_rtx (src, len);
3229 set_mem_align (src_mem, src_align);
3231 /* Copy word part most expediently. */
3232 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3233 CALL_EXPR_TAILCALL (exp)
3234 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3235 expected_align, expected_size,
3236 min_size, max_size, probable_max_size);
3238 if (dest_addr == 0)
3240 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3241 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3243 return dest_addr;
3247 /* Expand a call EXP to the mempcpy builtin.
3248 Return NULL_RTX if we failed; the caller should emit a normal call,
3249 otherwise try to get the result in TARGET, if convenient (and in
3250 mode MODE if that's convenient). If ENDP is 0 return the
3251 destination pointer, if ENDP is 1 return the end pointer ala
3252 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3253 stpcpy. */
3255 static rtx
3256 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3258 if (!validate_arglist (exp,
3259 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3260 return NULL_RTX;
3261 else
3263 tree dest = CALL_EXPR_ARG (exp, 0);
3264 tree src = CALL_EXPR_ARG (exp, 1);
3265 tree len = CALL_EXPR_ARG (exp, 2);
3266 return expand_builtin_mempcpy_args (dest, src, len,
3267 target, mode, /*endp=*/ 1);
3271 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3272 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3273 so that this can also be called without constructing an actual CALL_EXPR.
3274 The other arguments and return value are the same as for
3275 expand_builtin_mempcpy. */
3277 static rtx
3278 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3279 rtx target, enum machine_mode mode, int endp)
3281 /* If return value is ignored, transform mempcpy into memcpy. */
3282 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3284 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3285 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3286 dest, src, len);
3287 return expand_expr (result, target, mode, EXPAND_NORMAL);
3289 else
3291 const char *src_str;
3292 unsigned int src_align = get_pointer_alignment (src);
3293 unsigned int dest_align = get_pointer_alignment (dest);
3294 rtx dest_mem, src_mem, len_rtx;
3296 /* If either SRC or DEST is not a pointer type, don't do this
3297 operation in-line. */
3298 if (dest_align == 0 || src_align == 0)
3299 return NULL_RTX;
3301 /* If LEN is not constant, call the normal function. */
3302 if (! tree_fits_uhwi_p (len))
3303 return NULL_RTX;
3305 len_rtx = expand_normal (len);
3306 src_str = c_getstr (src);
3308 /* If SRC is a string constant and block move would be done
3309 by pieces, we can avoid loading the string from memory
3310 and only stored the computed constants. */
3311 if (src_str
3312 && CONST_INT_P (len_rtx)
3313 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3314 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3315 CONST_CAST (char *, src_str),
3316 dest_align, false))
3318 dest_mem = get_memory_rtx (dest, len);
3319 set_mem_align (dest_mem, dest_align);
3320 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3321 builtin_memcpy_read_str,
3322 CONST_CAST (char *, src_str),
3323 dest_align, false, endp);
3324 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3325 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3326 return dest_mem;
3329 if (CONST_INT_P (len_rtx)
3330 && can_move_by_pieces (INTVAL (len_rtx),
3331 MIN (dest_align, src_align)))
3333 dest_mem = get_memory_rtx (dest, len);
3334 set_mem_align (dest_mem, dest_align);
3335 src_mem = get_memory_rtx (src, len);
3336 set_mem_align (src_mem, src_align);
3337 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3338 MIN (dest_align, src_align), endp);
3339 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3340 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3341 return dest_mem;
3344 return NULL_RTX;
3348 #ifndef HAVE_movstr
3349 # define HAVE_movstr 0
3350 # define CODE_FOR_movstr CODE_FOR_nothing
3351 #endif
3353 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3354 we failed, the caller should emit a normal call, otherwise try to
3355 get the result in TARGET, if convenient. If ENDP is 0 return the
3356 destination pointer, if ENDP is 1 return the end pointer ala
3357 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3358 stpcpy. */
3360 static rtx
3361 expand_movstr (tree dest, tree src, rtx target, int endp)
3363 struct expand_operand ops[3];
3364 rtx dest_mem;
3365 rtx src_mem;
3367 if (!HAVE_movstr)
3368 return NULL_RTX;
3370 dest_mem = get_memory_rtx (dest, NULL);
3371 src_mem = get_memory_rtx (src, NULL);
3372 if (!endp)
3374 target = force_reg (Pmode, XEXP (dest_mem, 0));
3375 dest_mem = replace_equiv_address (dest_mem, target);
3378 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3379 create_fixed_operand (&ops[1], dest_mem);
3380 create_fixed_operand (&ops[2], src_mem);
3381 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3382 return NULL_RTX;
3384 if (endp && target != const0_rtx)
3386 target = ops[0].value;
3387 /* movstr is supposed to set end to the address of the NUL
3388 terminator. If the caller requested a mempcpy-like return value,
3389 adjust it. */
3390 if (endp == 1)
3392 rtx tem = plus_constant (GET_MODE (target),
3393 gen_lowpart (GET_MODE (target), target), 1);
3394 emit_move_insn (target, force_operand (tem, NULL_RTX));
3397 return target;
3400 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3401 NULL_RTX if we failed the caller should emit a normal call, otherwise
3402 try to get the result in TARGET, if convenient (and in mode MODE if that's
3403 convenient). */
3405 static rtx
3406 expand_builtin_strcpy (tree exp, rtx target)
3408 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3410 tree dest = CALL_EXPR_ARG (exp, 0);
3411 tree src = CALL_EXPR_ARG (exp, 1);
3412 return expand_builtin_strcpy_args (dest, src, target);
3414 return NULL_RTX;
3417 /* Helper function to do the actual work for expand_builtin_strcpy. The
3418 arguments to the builtin_strcpy call DEST and SRC are broken out
3419 so that this can also be called without constructing an actual CALL_EXPR.
3420 The other arguments and return value are the same as for
3421 expand_builtin_strcpy. */
3423 static rtx
3424 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3426 return expand_movstr (dest, src, target, /*endp=*/0);
3429 /* Expand a call EXP to the stpcpy builtin.
3430 Return NULL_RTX if we failed the caller should emit a normal call,
3431 otherwise try to get the result in TARGET, if convenient (and in
3432 mode MODE if that's convenient). */
3434 static rtx
3435 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3437 tree dst, src;
3438 location_t loc = EXPR_LOCATION (exp);
3440 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3441 return NULL_RTX;
3443 dst = CALL_EXPR_ARG (exp, 0);
3444 src = CALL_EXPR_ARG (exp, 1);
3446 /* If return value is ignored, transform stpcpy into strcpy. */
3447 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3449 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3450 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3451 return expand_expr (result, target, mode, EXPAND_NORMAL);
3453 else
3455 tree len, lenp1;
3456 rtx ret;
3458 /* Ensure we get an actual string whose length can be evaluated at
3459 compile-time, not an expression containing a string. This is
3460 because the latter will potentially produce pessimized code
3461 when used to produce the return value. */
3462 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3463 return expand_movstr (dst, src, target, /*endp=*/2);
3465 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3466 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3467 target, mode, /*endp=*/2);
3469 if (ret)
3470 return ret;
3472 if (TREE_CODE (len) == INTEGER_CST)
3474 rtx len_rtx = expand_normal (len);
3476 if (CONST_INT_P (len_rtx))
3478 ret = expand_builtin_strcpy_args (dst, src, target);
3480 if (ret)
3482 if (! target)
3484 if (mode != VOIDmode)
3485 target = gen_reg_rtx (mode);
3486 else
3487 target = gen_reg_rtx (GET_MODE (ret));
3489 if (GET_MODE (target) != GET_MODE (ret))
3490 ret = gen_lowpart (GET_MODE (target), ret);
3492 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3493 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3494 gcc_assert (ret);
3496 return target;
3501 return expand_movstr (dst, src, target, /*endp=*/2);
3505 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3506 bytes from constant string DATA + OFFSET and return it as target
3507 constant. */
3510 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3511 enum machine_mode mode)
3513 const char *str = (const char *) data;
3515 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3516 return const0_rtx;
3518 return c_readstr (str + offset, mode);
3521 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3522 NULL_RTX if we failed the caller should emit a normal call. */
3524 static rtx
3525 expand_builtin_strncpy (tree exp, rtx target)
3527 location_t loc = EXPR_LOCATION (exp);
3529 if (validate_arglist (exp,
3530 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3532 tree dest = CALL_EXPR_ARG (exp, 0);
3533 tree src = CALL_EXPR_ARG (exp, 1);
3534 tree len = CALL_EXPR_ARG (exp, 2);
3535 tree slen = c_strlen (src, 1);
3537 /* We must be passed a constant len and src parameter. */
3538 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3539 return NULL_RTX;
3541 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3543 /* We're required to pad with trailing zeros if the requested
3544 len is greater than strlen(s2)+1. In that case try to
3545 use store_by_pieces, if it fails, punt. */
3546 if (tree_int_cst_lt (slen, len))
3548 unsigned int dest_align = get_pointer_alignment (dest);
3549 const char *p = c_getstr (src);
3550 rtx dest_mem;
3552 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3553 || !can_store_by_pieces (tree_to_uhwi (len),
3554 builtin_strncpy_read_str,
3555 CONST_CAST (char *, p),
3556 dest_align, false))
3557 return NULL_RTX;
3559 dest_mem = get_memory_rtx (dest, len);
3560 store_by_pieces (dest_mem, tree_to_uhwi (len),
3561 builtin_strncpy_read_str,
3562 CONST_CAST (char *, p), dest_align, false, 0);
3563 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3564 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3565 return dest_mem;
3568 return NULL_RTX;
3571 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3572 bytes from constant string DATA + OFFSET and return it as target
3573 constant. */
3576 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3577 enum machine_mode mode)
3579 const char *c = (const char *) data;
3580 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3582 memset (p, *c, GET_MODE_SIZE (mode));
3584 return c_readstr (p, mode);
3587 /* Callback routine for store_by_pieces. Return the RTL of a register
3588 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3589 char value given in the RTL register data. For example, if mode is
3590 4 bytes wide, return the RTL for 0x01010101*data. */
3592 static rtx
3593 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3594 enum machine_mode mode)
3596 rtx target, coeff;
3597 size_t size;
3598 char *p;
3600 size = GET_MODE_SIZE (mode);
3601 if (size == 1)
3602 return (rtx) data;
3604 p = XALLOCAVEC (char, size);
3605 memset (p, 1, size);
3606 coeff = c_readstr (p, mode);
3608 target = convert_to_mode (mode, (rtx) data, 1);
3609 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3610 return force_reg (mode, target);
3613 /* Expand expression EXP, which is a call to the memset builtin. Return
3614 NULL_RTX if we failed the caller should emit a normal call, otherwise
3615 try to get the result in TARGET, if convenient (and in mode MODE if that's
3616 convenient). */
3618 static rtx
3619 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3621 if (!validate_arglist (exp,
3622 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3623 return NULL_RTX;
3624 else
3626 tree dest = CALL_EXPR_ARG (exp, 0);
3627 tree val = CALL_EXPR_ARG (exp, 1);
3628 tree len = CALL_EXPR_ARG (exp, 2);
3629 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3633 /* Helper function to do the actual work for expand_builtin_memset. The
3634 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3635 so that this can also be called without constructing an actual CALL_EXPR.
3636 The other arguments and return value are the same as for
3637 expand_builtin_memset. */
3639 static rtx
3640 expand_builtin_memset_args (tree dest, tree val, tree len,
3641 rtx target, enum machine_mode mode, tree orig_exp)
3643 tree fndecl, fn;
3644 enum built_in_function fcode;
3645 enum machine_mode val_mode;
3646 char c;
3647 unsigned int dest_align;
3648 rtx dest_mem, dest_addr, len_rtx;
3649 HOST_WIDE_INT expected_size = -1;
3650 unsigned int expected_align = 0;
3651 unsigned HOST_WIDE_INT min_size;
3652 unsigned HOST_WIDE_INT max_size;
3653 unsigned HOST_WIDE_INT probable_max_size;
3655 dest_align = get_pointer_alignment (dest);
3657 /* If DEST is not a pointer type, don't do this operation in-line. */
3658 if (dest_align == 0)
3659 return NULL_RTX;
3661 if (currently_expanding_gimple_stmt)
3662 stringop_block_profile (currently_expanding_gimple_stmt,
3663 &expected_align, &expected_size);
3665 if (expected_align < dest_align)
3666 expected_align = dest_align;
3668 /* If the LEN parameter is zero, return DEST. */
3669 if (integer_zerop (len))
3671 /* Evaluate and ignore VAL in case it has side-effects. */
3672 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3673 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3676 /* Stabilize the arguments in case we fail. */
3677 dest = builtin_save_expr (dest);
3678 val = builtin_save_expr (val);
3679 len = builtin_save_expr (len);
3681 len_rtx = expand_normal (len);
3682 determine_block_size (len, len_rtx, &min_size, &max_size,
3683 &probable_max_size);
3684 dest_mem = get_memory_rtx (dest, len);
3685 val_mode = TYPE_MODE (unsigned_char_type_node);
3687 if (TREE_CODE (val) != INTEGER_CST)
3689 rtx val_rtx;
3691 val_rtx = expand_normal (val);
3692 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3694 /* Assume that we can memset by pieces if we can store
3695 * the coefficients by pieces (in the required modes).
3696 * We can't pass builtin_memset_gen_str as that emits RTL. */
3697 c = 1;
3698 if (tree_fits_uhwi_p (len)
3699 && can_store_by_pieces (tree_to_uhwi (len),
3700 builtin_memset_read_str, &c, dest_align,
3701 true))
3703 val_rtx = force_reg (val_mode, val_rtx);
3704 store_by_pieces (dest_mem, tree_to_uhwi (len),
3705 builtin_memset_gen_str, val_rtx, dest_align,
3706 true, 0);
3708 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3709 dest_align, expected_align,
3710 expected_size, min_size, max_size,
3711 probable_max_size))
3712 goto do_libcall;
3714 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3715 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3716 return dest_mem;
3719 if (target_char_cast (val, &c))
3720 goto do_libcall;
3722 if (c)
3724 if (tree_fits_uhwi_p (len)
3725 && can_store_by_pieces (tree_to_uhwi (len),
3726 builtin_memset_read_str, &c, dest_align,
3727 true))
3728 store_by_pieces (dest_mem, tree_to_uhwi (len),
3729 builtin_memset_read_str, &c, dest_align, true, 0);
3730 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3731 gen_int_mode (c, val_mode),
3732 dest_align, expected_align,
3733 expected_size, min_size, max_size,
3734 probable_max_size))
3735 goto do_libcall;
3737 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3738 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3739 return dest_mem;
3742 set_mem_align (dest_mem, dest_align);
3743 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3744 CALL_EXPR_TAILCALL (orig_exp)
3745 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3746 expected_align, expected_size,
3747 min_size, max_size,
3748 probable_max_size);
3750 if (dest_addr == 0)
3752 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3753 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3756 return dest_addr;
3758 do_libcall:
3759 fndecl = get_callee_fndecl (orig_exp);
3760 fcode = DECL_FUNCTION_CODE (fndecl);
3761 if (fcode == BUILT_IN_MEMSET)
3762 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3763 dest, val, len);
3764 else if (fcode == BUILT_IN_BZERO)
3765 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3766 dest, len);
3767 else
3768 gcc_unreachable ();
3769 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3770 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3771 return expand_call (fn, target, target == const0_rtx);
3774 /* Expand expression EXP, which is a call to the bzero builtin. Return
3775 NULL_RTX if we failed the caller should emit a normal call. */
3777 static rtx
3778 expand_builtin_bzero (tree exp)
3780 tree dest, size;
3781 location_t loc = EXPR_LOCATION (exp);
3783 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3784 return NULL_RTX;
3786 dest = CALL_EXPR_ARG (exp, 0);
3787 size = CALL_EXPR_ARG (exp, 1);
3789 /* New argument list transforming bzero(ptr x, int y) to
3790 memset(ptr x, int 0, size_t y). This is done this way
3791 so that if it isn't expanded inline, we fallback to
3792 calling bzero instead of memset. */
3794 return expand_builtin_memset_args (dest, integer_zero_node,
3795 fold_convert_loc (loc,
3796 size_type_node, size),
3797 const0_rtx, VOIDmode, exp);
3800 /* Expand expression EXP, which is a call to the memcmp built-in function.
3801 Return NULL_RTX if we failed and the caller should emit a normal call,
3802 otherwise try to get the result in TARGET, if convenient (and in mode
3803 MODE, if that's convenient). */
3805 static rtx
3806 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3807 ATTRIBUTE_UNUSED enum machine_mode mode)
3809 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3811 if (!validate_arglist (exp,
3812 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3813 return NULL_RTX;
3815 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3816 implementing memcmp because it will stop if it encounters two
3817 zero bytes. */
3818 #if defined HAVE_cmpmemsi
3820 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3821 rtx result;
3822 rtx insn;
3823 tree arg1 = CALL_EXPR_ARG (exp, 0);
3824 tree arg2 = CALL_EXPR_ARG (exp, 1);
3825 tree len = CALL_EXPR_ARG (exp, 2);
3827 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3828 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3829 enum machine_mode insn_mode;
3831 if (HAVE_cmpmemsi)
3832 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3833 else
3834 return NULL_RTX;
3836 /* If we don't have POINTER_TYPE, call the function. */
3837 if (arg1_align == 0 || arg2_align == 0)
3838 return NULL_RTX;
3840 /* Make a place to write the result of the instruction. */
3841 result = target;
3842 if (! (result != 0
3843 && REG_P (result) && GET_MODE (result) == insn_mode
3844 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3845 result = gen_reg_rtx (insn_mode);
3847 arg1_rtx = get_memory_rtx (arg1, len);
3848 arg2_rtx = get_memory_rtx (arg2, len);
3849 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3851 /* Set MEM_SIZE as appropriate. */
3852 if (CONST_INT_P (arg3_rtx))
3854 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3855 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3858 if (HAVE_cmpmemsi)
3859 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3860 GEN_INT (MIN (arg1_align, arg2_align)));
3861 else
3862 gcc_unreachable ();
3864 if (insn)
3865 emit_insn (insn);
3866 else
3867 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3868 TYPE_MODE (integer_type_node), 3,
3869 XEXP (arg1_rtx, 0), Pmode,
3870 XEXP (arg2_rtx, 0), Pmode,
3871 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3872 TYPE_UNSIGNED (sizetype)),
3873 TYPE_MODE (sizetype));
3875 /* Return the value in the proper mode for this function. */
3876 mode = TYPE_MODE (TREE_TYPE (exp));
3877 if (GET_MODE (result) == mode)
3878 return result;
3879 else if (target != 0)
3881 convert_move (target, result, 0);
3882 return target;
3884 else
3885 return convert_to_mode (mode, result, 0);
3887 #endif /* HAVE_cmpmemsi. */
3889 return NULL_RTX;
3892 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3893 if we failed the caller should emit a normal call, otherwise try to get
3894 the result in TARGET, if convenient. */
3896 static rtx
3897 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3899 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3900 return NULL_RTX;
/* Only attempt inline expansion if the target provides a cmpstr or
   cmpstrn pattern; otherwise fall through and return NULL_RTX so the
   caller emits a normal library call. */
3902 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3903 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3904 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3906 rtx arg1_rtx, arg2_rtx;
3907 rtx result, insn = NULL_RTX;
3908 tree fndecl, fn;
3909 tree arg1 = CALL_EXPR_ARG (exp, 0);
3910 tree arg2 = CALL_EXPR_ARG (exp, 1);
3912 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3913 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3915 /* If we don't have POINTER_TYPE, call the function. */
3916 if (arg1_align == 0 || arg2_align == 0)
3917 return NULL_RTX;
3919 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3920 arg1 = builtin_save_expr (arg1);
3921 arg2 = builtin_save_expr (arg2);
3923 arg1_rtx = get_memory_rtx (arg1, NULL);
3924 arg2_rtx = get_memory_rtx (arg2, NULL);
3926 #ifdef HAVE_cmpstrsi
3927 /* Try to call cmpstrsi. */
3928 if (HAVE_cmpstrsi)
3930 enum machine_mode insn_mode
3931 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3933 /* Make a place to write the result of the instruction. */
3934 result = target;
3935 if (! (result != 0
3936 && REG_P (result) && GET_MODE (result) == insn_mode
3937 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3938 result = gen_reg_rtx (insn_mode);
3940 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3941 GEN_INT (MIN (arg1_align, arg2_align)));
3943 #endif
3944 #ifdef HAVE_cmpstrnsi
3945 /* Try to determine at least one length and call cmpstrnsi. */
3946 if (!insn && HAVE_cmpstrnsi)
3948 tree len;
3949 rtx arg3_rtx;
3951 enum machine_mode insn_mode
3952 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3953 tree len1 = c_strlen (arg1, 1);
3954 tree len2 = c_strlen (arg2, 1);
/* Add one to each known length so the comparison also covers the
   terminating NUL byte. */
3956 if (len1)
3957 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3958 if (len2)
3959 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3961 /* If we don't have a constant length for the first, use the length
3962 of the second, if we know it. We don't require a constant for
3963 this case; some cost analysis could be done if both are available
3964 but neither is constant. For now, assume they're equally cheap,
3965 unless one has side effects. If both strings have constant lengths,
3966 use the smaller. */
3968 if (!len1)
3969 len = len2;
3970 else if (!len2)
3971 len = len1;
3972 else if (TREE_SIDE_EFFECTS (len1))
3973 len = len2;
3974 else if (TREE_SIDE_EFFECTS (len2))
3975 len = len1;
3976 else if (TREE_CODE (len1) != INTEGER_CST)
3977 len = len2;
3978 else if (TREE_CODE (len2) != INTEGER_CST)
3979 len = len1;
3980 else if (tree_int_cst_lt (len1, len2))
3981 len = len1;
3982 else
3983 len = len2;
3985 /* If both arguments have side effects, we cannot optimize. */
3986 if (!len || TREE_SIDE_EFFECTS (len))
3987 goto do_libcall;
3989 arg3_rtx = expand_normal (len);
3991 /* Make a place to write the result of the instruction. */
3992 result = target;
3993 if (! (result != 0
3994 && REG_P (result) && GET_MODE (result) == insn_mode
3995 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3996 result = gen_reg_rtx (insn_mode);
3998 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3999 GEN_INT (MIN (arg1_align, arg2_align)));
4001 #endif
/* If either pattern produced an insn, emit it and convert the raw
   result to the mode callers of strcmp expect. */
4003 if (insn)
4005 enum machine_mode mode;
4006 emit_insn (insn);
4008 /* Return the value in the proper mode for this function. */
4009 mode = TYPE_MODE (TREE_TYPE (exp));
4010 if (GET_MODE (result) == mode)
4011 return result;
4012 if (target == 0)
4013 return convert_to_mode (mode, result, 0);
4014 convert_move (target, result, 0);
4015 return target;
4018 /* Expand the library call ourselves using a stabilized argument
4019 list to avoid re-evaluating the function's arguments twice. */
4020 #ifdef HAVE_cmpstrnsi
4021 do_libcall:
4022 #endif
4023 fndecl = get_callee_fndecl (exp);
4024 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4025 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4026 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4027 return expand_call (fn, target, target == const0_rtx);
4029 #endif
4030 return NULL_RTX;
4033 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4034 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4035 the result in TARGET, if convenient. */
4037 static rtx
4038 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4039 ATTRIBUTE_UNUSED enum machine_mode mode)
4041 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4043 if (!validate_arglist (exp,
4044 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4045 return NULL_RTX;
4047 /* If c_strlen can determine an expression for one of the string
4048 lengths, and it doesn't have side effects, then emit cmpstrnsi
4049 using length MIN(strlen(string)+1, arg3). */
4050 #ifdef HAVE_cmpstrnsi
4051 if (HAVE_cmpstrnsi)
4053 tree len, len1, len2;
4054 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4055 rtx result, insn;
4056 tree fndecl, fn;
4057 tree arg1 = CALL_EXPR_ARG (exp, 0);
4058 tree arg2 = CALL_EXPR_ARG (exp, 1);
4059 tree arg3 = CALL_EXPR_ARG (exp, 2);
4061 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4062 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4063 enum machine_mode insn_mode
4064 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4066 len1 = c_strlen (arg1, 1);
4067 len2 = c_strlen (arg2, 1);
/* Add one to each known length so the terminating NUL is included in
   the bounded comparison. */
4069 if (len1)
4070 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4071 if (len2)
4072 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4074 /* If we don't have a constant length for the first, use the length
4075 of the second, if we know it. We don't require a constant for
4076 this case; some cost analysis could be done if both are available
4077 but neither is constant. For now, assume they're equally cheap,
4078 unless one has side effects. If both strings have constant lengths,
4079 use the smaller. */
4081 if (!len1)
4082 len = len2;
4083 else if (!len2)
4084 len = len1;
4085 else if (TREE_SIDE_EFFECTS (len1))
4086 len = len2;
4087 else if (TREE_SIDE_EFFECTS (len2))
4088 len = len1;
4089 else if (TREE_CODE (len1) != INTEGER_CST)
4090 len = len2;
4091 else if (TREE_CODE (len2) != INTEGER_CST)
4092 len = len1;
4093 else if (tree_int_cst_lt (len1, len2))
4094 len = len1;
4095 else
4096 len = len2;
4098 /* If both arguments have side effects, we cannot optimize. */
4099 if (!len || TREE_SIDE_EFFECTS (len))
4100 return NULL_RTX;
4102 /* The actual new length parameter is MIN(len,arg3). */
4103 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4104 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4106 /* If we don't have POINTER_TYPE, call the function. */
4107 if (arg1_align == 0 || arg2_align == 0)
4108 return NULL_RTX;
4110 /* Make a place to write the result of the instruction. */
4111 result = target;
4112 if (! (result != 0
4113 && REG_P (result) && GET_MODE (result) == insn_mode
4114 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4115 result = gen_reg_rtx (insn_mode);
4117 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4118 arg1 = builtin_save_expr (arg1);
4119 arg2 = builtin_save_expr (arg2);
4120 len = builtin_save_expr (len);
4122 arg1_rtx = get_memory_rtx (arg1, len);
4123 arg2_rtx = get_memory_rtx (arg2, len);
4124 arg3_rtx = expand_normal (len);
4125 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4126 GEN_INT (MIN (arg1_align, arg2_align)));
4127 if (insn)
4129 emit_insn (insn);
4131 /* Return the value in the proper mode for this function. */
4132 mode = TYPE_MODE (TREE_TYPE (exp));
4133 if (GET_MODE (result) == mode)
4134 return result;
4135 if (target == 0)
4136 return convert_to_mode (mode, result, 0);
4137 convert_move (target, result, 0);
4138 return target;
4141 /* Expand the library call ourselves using a stabilized argument
4142 list to avoid re-evaluating the function's arguments twice. */
4143 fndecl = get_callee_fndecl (exp);
4144 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4145 arg1, arg2, len);
4146 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4147 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4148 return expand_call (fn, target, target == const0_rtx);
4150 #endif
4151 return NULL_RTX;
4154 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4155 if that's convenient. */
4158 expand_builtin_saveregs (void)
4160 rtx val, seq;
4162 /* Don't do __builtin_saveregs more than once in a function.
4163 Save the result of the first call and reuse it. */
4164 if (saveregs_value != 0)
4165 return saveregs_value;
4167 /* When this function is called, it means that registers must be
4168 saved on entry to this function. So we migrate the call to the
4169 first insn of this function. */
/* Record the target-specific save sequence in a detached insn list so
   it can be re-emitted at the function entry below. */
4171 start_sequence ();
4173 /* Do whatever the machine needs done in this case. */
4174 val = targetm.calls.expand_builtin_saveregs ();
4176 seq = get_insns ();
4177 end_sequence ();
4179 saveregs_value = val;
4181 /* Put the insns after the NOTE that starts the function. If this
4182 is inside a start_sequence, make the outer-level insn chain current, so
4183 the code is placed at the start of the function. */
4184 push_topmost_sequence ();
4185 emit_insn_after (seq, entry_of_function ());
4186 pop_topmost_sequence ();
4188 return val;
4191 /* Expand a call to __builtin_next_arg.  Returns the address of the
first anonymous (variadic) argument: the incoming argument pointer
plus the offset of the named arguments. */
4193 static rtx
4194 expand_builtin_next_arg (void)
4196 /* Checking arguments is already done in fold_builtin_next_arg
4197 that must be called before this function. */
4198 return expand_binop (ptr_mode, add_optab,
4199 crtl->args.internal_arg_pointer,
4200 crtl->args.arg_offset_rtx,
4201 NULL_RTX, 0, OPTAB_LIB_WIDEN)
4204 /* Make it easier for the backends by protecting the valist argument
4205 from multiple evaluations.  If NEEDS_LVALUE, the result must be
usable as an lvalue (for va_start/va_copy destinations). */
4207 static tree
4208 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4210 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4212 /* The current way of determining the type of valist is completely
4213 bogus. We should have the information on the va builtin instead. */
4214 if (!vatype)
4215 vatype = targetm.fn_abi_va_list (cfun->decl);
/* Array-type va_lists decay to pointers; hand the backend an address
   of the array rather than the array itself. */
4217 if (TREE_CODE (vatype) == ARRAY_TYPE)
4219 if (TREE_SIDE_EFFECTS (valist))
4220 valist = save_expr (valist);
4222 /* For this case, the backends will be expecting a pointer to
4223 vatype, but it's possible we've actually been given an array
4224 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4225 So fix it. */
4226 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4228 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4229 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4232 else
4234 tree pt = build_pointer_type (vatype);
4236 if (! needs_lvalue)
4238 if (! TREE_SIDE_EFFECTS (valist))
4239 return valist;
4241 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4242 TREE_SIDE_EFFECTS (valist) = 1;
4245 if (TREE_SIDE_EFFECTS (valist))
4246 valist = save_expr (valist);
4247 valist = fold_build2_loc (loc, MEM_REF,
4248 vatype, valist, build_int_cst (pt, 0));
4251 return valist;
4254 /* The "standard" definition of va_list is void*.  Default for the
   TARGET_BUILD_BUILTIN_VA_LIST hook. */
4256 tree
4257 std_build_builtin_va_list (void)
4259 return ptr_type_node;
4262 /* The "standard" abi va_list is va_list_type_node.  Default for the
   TARGET_FN_ABI_VA_LIST hook; FNDECL is unused here. */
4264 tree
4265 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4267 return va_list_type_node;
4270 /* The "standard" type of va_list is va_list_type_node.  Return
   va_list_type_node when TYPE is (a possibly decayed form of) the
   canonical va_list type, otherwise NULL_TREE. */
4272 tree
4273 std_canonical_va_list_type (tree type)
4275 tree wtype, htype;
/* Strip one level of indirection so both pointer-to-va_list and
   va_list itself are recognized. */
4277 if (INDIRECT_REF_P (type))
4278 type = TREE_TYPE (type);
4279 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4280 type = TREE_TYPE (type);
4281 wtype = va_list_type_node;
4282 htype = type;
4283 /* Treat structure va_list types. */
4284 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4285 htype = TREE_TYPE (htype);
4286 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4288 /* If va_list is an array type, the argument may have decayed
4289 to a pointer type, e.g. by being passed to another function.
4290 In that case, unwrap both types so that we can compare the
4291 underlying records. */
4292 if (TREE_CODE (htype) == ARRAY_TYPE
4293 || POINTER_TYPE_P (htype))
4295 wtype = TREE_TYPE (wtype);
4296 htype = TREE_TYPE (htype);
4299 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4300 return va_list_type_node;
4302 return NULL_TREE;
4305 /* The "standard" implementation of va_start: just assign `nextarg' to
4306 the variable.  VALIST is the user's va_list; NEXTARG is the RTX for
the first anonymous argument's address. */
4308 void
4309 std_expand_builtin_va_start (tree valist, rtx nextarg)
4311 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4312 convert_move (va_r, nextarg, 0);
4315 /* Expand EXP, a call to __builtin_va_start.  Always returns const0_rtx;
diagnostics are emitted for malformed calls. */
4317 static rtx
4318 expand_builtin_va_start (tree exp)
4320 rtx nextarg;
4321 tree valist;
4322 location_t loc = EXPR_LOCATION (exp);
4324 if (call_expr_nargs (exp) < 2)
4326 error_at (loc, "too few arguments to function %<va_start%>");
4327 return const0_rtx;
/* fold_builtin_next_arg diagnoses a bad second argument; bail out
   quietly if it reported a problem. */
4330 if (fold_builtin_next_arg (exp, true))
4331 return const0_rtx;
4333 nextarg = expand_builtin_next_arg ();
4334 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
/* Prefer the target's own va_start expander when it provides one. */
4336 if (targetm.expand_builtin_va_start)
4337 targetm.expand_builtin_va_start (valist, nextarg);
4338 else
4339 std_expand_builtin_va_start (valist, nextarg);
4341 return const0_rtx;
4344 /* Expand EXP, a call to __builtin_va_end.  va_end is a no-op except
that its operand must still be evaluated for side effects. */
4346 static rtx
4347 expand_builtin_va_end (tree exp)
4349 tree valist = CALL_EXPR_ARG (exp, 0);
4351 /* Evaluate for side effects, if needed. I hate macros that don't
4352 do that. */
4353 if (TREE_SIDE_EFFECTS (valist))
4354 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4356 return const0_rtx;
4359 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4360 builtin rather than just as an assignment in stdarg.h because of the
4361 nastiness of array-type va_list types. */
4363 static rtx
4364 expand_builtin_va_copy (tree exp)
4366 tree dst, src, t;
4367 location_t loc = EXPR_LOCATION (exp);
4369 dst = CALL_EXPR_ARG (exp, 0);
4370 src = CALL_EXPR_ARG (exp, 1);
/* Destination needs an lvalue; source only needs a readable value. */
4372 dst = stabilize_va_list_loc (loc, dst, 1);
4373 src = stabilize_va_list_loc (loc, src, 0);
4375 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
/* Scalar/record va_list: a plain assignment suffices. */
4377 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4379 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4380 TREE_SIDE_EFFECTS (t) = 1;
4381 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4383 else
4385 rtx dstb, srcb, size;
4387 /* Evaluate to pointers. */
4388 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4389 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4390 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4391 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4393 dstb = convert_memory_address (Pmode, dstb);
4394 srcb = convert_memory_address (Pmode, srcb);
4396 /* "Dereference" to BLKmode memories. */
4397 dstb = gen_rtx_MEM (BLKmode, dstb);
4398 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4399 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4400 srcb = gen_rtx_MEM (BLKmode, srcb);
4401 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4402 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4404 /* Copy. */
4405 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4408 return const0_rtx;
4411 /* Expand a call to one of the builtin functions __builtin_frame_address or
4412 __builtin_return_address.  FNDECL distinguishes which of the two;
EXP is the call.  Erroneous calls expand to const0_rtx after a
diagnostic rather than failing. */
4414 static rtx
4415 expand_builtin_frame_address (tree fndecl, tree exp)
4417 /* The argument must be a nonnegative integer constant.
4418 It counts the number of frames to scan up the stack.
4419 The value is the return address saved in that frame. */
4420 if (call_expr_nargs (exp) == 0)
4421 /* Warning about missing arg was already issued. */
4422 return const0_rtx;
4423 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4425 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4426 error ("invalid argument to %<__builtin_frame_address%>");
4427 else
4428 error ("invalid argument to %<__builtin_return_address%>");
4429 return const0_rtx;
4431 else
4433 rtx tem
4434 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4435 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4437 /* Some ports cannot access arbitrary stack frames. */
4438 if (tem == NULL)
4440 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4441 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4442 else
4443 warning (0, "unsupported argument to %<__builtin_return_address%>");
4444 return const0_rtx;
4447 /* For __builtin_frame_address, return what we've got. */
4448 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4449 return tem;
/* For __builtin_return_address, force the address into a register
   unless it is already a register or constant. */
4451 if (!REG_P (tem)
4452 && ! CONSTANT_P (tem))
4453 tem = copy_addr_to_reg (tem);
4454 return tem;
4458 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4459 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4460 is the same as for allocate_dynamic_stack_space. */
4462 static rtx
4463 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4465 rtx op0;
4466 rtx result;
4467 bool valid_arglist;
4468 unsigned int align;
/* __builtin_alloca_with_align takes an extra alignment argument;
   plain alloca uses BIGGEST_ALIGNMENT. */
4469 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4470 == BUILT_IN_ALLOCA_WITH_ALIGN);
4472 valid_arglist
4473 = (alloca_with_align
4474 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4475 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4477 if (!valid_arglist)
4478 return NULL_RTX;
4480 /* Compute the argument. */
4481 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4483 /* Compute the alignment. */
4484 align = (alloca_with_align
4485 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4486 : BIGGEST_ALIGNMENT);
4488 /* Allocate the desired space. */
4489 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4490 result = convert_memory_address (ptr_mode, result);
4492 return result;
4495 /* Expand a call to bswap builtin in EXP.
4496 Return NULL_RTX if a normal call should be emitted rather than expanding the
4497 function in-line. If convenient, the result should be placed in TARGET.
4498 SUBTARGET may be used as the target for computing one of EXP's operands. */
4500 static rtx
4501 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4502 rtx subtarget)
4504 tree arg;
4505 rtx op0;
4507 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4508 return NULL_RTX;
4510 arg = CALL_EXPR_ARG (exp, 0);
/* Reuse SUBTARGET only when its mode matches; bswap operates on the
   builtin's declared mode, so widen/narrow the operand first. */
4511 op0 = expand_expr (arg,
4512 subtarget && GET_MODE (subtarget) == target_mode
4513 ? subtarget : NULL_RTX,
4514 target_mode, EXPAND_NORMAL);
4515 if (GET_MODE (op0) != target_mode)
4516 op0 = convert_to_mode (target_mode, op0, 1);
4518 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4520 gcc_assert (target);
4522 return convert_to_mode (target_mode, target, 1);
4525 /* Expand a call to a unary builtin in EXP.
4526 Return NULL_RTX if a normal call should be emitted rather than expanding the
4527 function in-line. If convenient, the result should be placed in TARGET.
4528 SUBTARGET may be used as the target for computing one of EXP's operands. */
4530 static rtx
4531 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4532 rtx subtarget, optab op_optab)
4534 rtx op0;
4536 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4537 return NULL_RTX;
4539 /* Compute the argument. */
4540 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4541 (subtarget
4542 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4543 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4544 VOIDmode, EXPAND_NORMAL);
4545 /* Compute op, into TARGET if possible.
4546 Set TARGET to wherever the result comes back.  The unsignedp flag
is false only for clrsb, whose result is signed. */
4547 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4548 op_optab, op0, target, op_optab != clrsb_optab);
4549 gcc_assert (target);
4551 return convert_to_mode (target_mode, target, 0);
4554 /* Expand a call to __builtin_expect. We just return our argument
4555 as the builtin_expect semantic should've been already executed by
4556 tree branch prediction pass. */
4558 static rtx
4559 expand_builtin_expect (tree exp, rtx target)
4561 tree arg;
4563 if (call_expr_nargs (exp) < 2)
4564 return const0_rtx;
4565 arg = CALL_EXPR_ARG (exp, 0);
4567 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4568 /* When guessing was done, the hints should be already stripped away. */
4569 gcc_assert (!flag_guess_branch_prob
4570 || optimize == 0 || seen_error ());
4571 return target;
4574 /* Expand a call to __builtin_assume_aligned. We just return our first
4575 argument as the builtin_assume_aligned semantic should've been already
4576 executed by CCP. */
4578 static rtx
4579 expand_builtin_assume_aligned (tree exp, rtx target)
4581 if (call_expr_nargs (exp) < 2)
4582 return const0_rtx;
4583 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4584 EXPAND_NORMAL);
/* The alignment/misalignment operands must be side-effect free by
   this point, since they are dropped rather than evaluated. */
4585 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4586 && (call_expr_nargs (exp) < 3
4587 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4588 return target;
/* Emit code for __builtin_trap: the target's trap insn when available,
   otherwise a call to abort via the abort libfunc.  Always ends the
   block with a barrier since control does not continue. */
4591 void
4592 expand_builtin_trap (void)
4594 #ifdef HAVE_trap
4595 if (HAVE_trap)
4597 rtx insn = emit_insn (gen_trap ());
4598 /* For trap insns when not accumulating outgoing args force
4599 REG_ARGS_SIZE note to prevent crossjumping of calls with
4600 different args sizes. */
4601 if (!ACCUMULATE_OUTGOING_ARGS)
4602 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4604 else
4605 #endif
4606 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4607 emit_barrier ();
4610 /* Expand a call to __builtin_unreachable. We do nothing except emit
4611 a barrier saying that control flow will not pass here.
4613 It is the responsibility of the program being compiled to ensure
4614 that control flow does never reach __builtin_unreachable. */
4615 static void
4616 expand_builtin_unreachable (void)
4618 emit_barrier ();
4621 /* Expand EXP, a call to fabs, fabsf or fabsl.
4622 Return NULL_RTX if a normal call should be emitted rather than expanding
4623 the function inline. If convenient, the result should be placed
4624 in TARGET. SUBTARGET may be used as the target for computing
4625 the operand. */
4627 static rtx
4628 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4630 enum machine_mode mode;
4631 tree arg;
4632 rtx op0;
4634 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4635 return NULL_RTX;
/* Save the argument back into the call so safe_from_p below sees the
   stabilized form. */
4637 arg = CALL_EXPR_ARG (exp, 0);
4638 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4639 mode = TYPE_MODE (TREE_TYPE (arg));
4640 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4641 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4644 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4645 Return NULL is a normal call should be emitted rather than expanding the
4646 function inline. If convenient, the result should be placed in TARGET.
4647 SUBTARGET may be used as the target for computing the operand. */
4649 static rtx
4650 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4652 rtx op0, op1;
4653 tree arg;
4655 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4656 return NULL_RTX;
/* op0 carries the magnitude, op1 the sign. */
4658 arg = CALL_EXPR_ARG (exp, 0);
4659 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4661 arg = CALL_EXPR_ARG (exp, 1);
4662 op1 = expand_normal (arg);
4664 return expand_copysign (op0, op1, target);
4667 /* Expand a call to __builtin___clear_cache.  Three cases: no insn and
a libgcc implementation exists (expand to a call, NULL_RTX); no insn
and libgcc's version is a no-op (expand to nothing, const0_rtx); or
the target provides a clear_cache insn (emit it directly). */
4669 static rtx
4670 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4672 #ifndef HAVE_clear_cache
4673 #ifdef CLEAR_INSN_CACHE
4674 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4675 does something. Just do the default expansion to a call to
4676 __clear_cache(). */
4677 return NULL_RTX;
4678 #else
4679 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4680 does nothing. There is no need to call it. Do nothing. */
4681 return const0_rtx;
4682 #endif /* CLEAR_INSN_CACHE */
4683 #else
4684 /* We have a "clear_cache" insn, and it will handle everything. */
4685 tree begin, end;
4686 rtx begin_rtx, end_rtx;
4688 /* We must not expand to a library call. If we did, any
4689 fallback library function in libgcc that might contain a call to
4690 __builtin___clear_cache() would recurse infinitely. */
4691 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4693 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4694 return const0_rtx;
4697 if (HAVE_clear_cache)
4699 struct expand_operand ops[2];
4701 begin = CALL_EXPR_ARG (exp, 0);
4702 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4704 end = CALL_EXPR_ARG (exp, 1);
4705 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4707 create_address_operand (&ops[0], begin_rtx);
4708 create_address_operand (&ops[1], end_rtx);
4709 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4710 return const0_rtx;
4712 return const0_rtx;
4713 #endif /* HAVE_clear_cache */
4716 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4718 static rtx
4719 round_trampoline_addr (rtx tramp)
4721 rtx temp, addend, mask;
4723 /* If we don't need too much alignment, we'll have been guaranteed
4724 proper alignment by get_trampoline_type. */
4725 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4726 return tramp;
4728 /* Round address up to desired boundary. */
/* (addr + align-1) & -align — the classic round-up-to-power-of-two. */
4729 temp = gen_reg_rtx (Pmode);
4730 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4731 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4733 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4734 temp, 0, OPTAB_LIB_WIDEN);
4735 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4736 temp, 0, OPTAB_LIB_WIDEN);
4738 return tramp;
/* Expand __builtin_init_trampoline (ONSTACK true) or
   __builtin_init_heap_trampoline (ONSTACK false).  EXP's arguments are
   the trampoline storage, the nested function's address, and the static
   chain value.  Returns const0_rtx, or NULL_RTX on bad arguments. */
4741 static rtx
4742 expand_builtin_init_trampoline (tree exp, bool onstack)
4744 tree t_tramp, t_func, t_chain;
4745 rtx m_tramp, r_tramp, r_chain, tmp;
4747 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4748 POINTER_TYPE, VOID_TYPE))
4749 return NULL_RTX;
4751 t_tramp = CALL_EXPR_ARG (exp, 0);
4752 t_func = CALL_EXPR_ARG (exp, 1);
4753 t_chain = CALL_EXPR_ARG (exp, 2);
4755 r_tramp = expand_normal (t_tramp);
4756 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4757 MEM_NOTRAP_P (m_tramp) = 1;
4759 /* If ONSTACK, the TRAMP argument should be the address of a field
4760 within the local function's FRAME decl. Either way, let's see if
4761 we can fill in the MEM_ATTRs for this memory. */
4762 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4763 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4765 /* Creator of a heap trampoline is responsible for making sure the
4766 address is aligned to at least STACK_BOUNDARY. Normally malloc
4767 will ensure this anyhow. */
4768 tmp = round_trampoline_addr (r_tramp);
4769 if (tmp != r_tramp)
4771 m_tramp = change_address (m_tramp, BLKmode, tmp);
4772 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4773 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4776 /* The FUNC argument should be the address of the nested function.
4777 Extract the actual function decl to pass to the hook. */
4778 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4779 t_func = TREE_OPERAND (t_func, 0);
4780 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4782 r_chain = expand_normal (t_chain);
4784 /* Generate insns to initialize the trampoline. */
4785 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
/* Only stack trampolines trigger the -Wtrampolines warning and the
   executable-stack machinery via trampolines_created. */
4787 if (onstack)
4789 trampolines_created = 1;
4791 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4792 "trampoline generated for nested function %qD", t_func);
4795 return const0_rtx;
/* Expand __builtin_adjust_trampoline: round the trampoline address up
   to TRAMPOLINE_ALIGNMENT and let the target apply any further
   adjustment (e.g. mode bits).  Returns the callable address, or
   NULL_RTX on a bad argument list. */
4798 static rtx
4799 expand_builtin_adjust_trampoline (tree exp)
4801 rtx tramp;
4803 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4804 return NULL_RTX;
4806 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4807 tramp = round_trampoline_addr (tramp);
4808 if (targetm.calls.trampoline_adjust_address)
4809 tramp = targetm.calls.trampoline_adjust_address (tramp);
4811 return tramp;
4814 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4815 function. The function first checks whether the back end provides
4816 an insn to implement signbit for the respective mode. If not, it
4817 checks whether the floating point format of the value is such that
4818 the sign bit can be extracted. If that is not the case, the
4819 function returns NULL_RTX to indicate that a normal call should be
4820 emitted rather than expanding the function in-line. EXP is the
4821 expression that is a call to the builtin function; if convenient,
4822 the result should be placed in TARGET. */
4823 static rtx
4824 expand_builtin_signbit (tree exp, rtx target)
4826 const struct real_format *fmt;
4827 enum machine_mode fmode, imode, rmode;
4828 tree arg;
4829 int word, bitpos;
4830 enum insn_code icode;
4831 rtx temp;
4832 location_t loc = EXPR_LOCATION (exp);
4834 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4835 return NULL_RTX;
4837 arg = CALL_EXPR_ARG (exp, 0);
4838 fmode = TYPE_MODE (TREE_TYPE (arg));
4839 rmode = TYPE_MODE (TREE_TYPE (exp));
4840 fmt = REAL_MODE_FORMAT (fmode);
4842 arg = builtin_save_expr (arg);
4844 /* Expand the argument yielding a RTX expression. */
4845 temp = expand_normal (arg);
4847 /* Check if the back end provides an insn that handles signbit for the
4848 argument's mode. */
4849 icode = optab_handler (signbit_optab, fmode);
4850 if (icode != CODE_FOR_nothing)
4852 rtx last = get_last_insn ();
4853 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4854 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4855 return target;
/* The insn failed to emit; discard any partial sequence. */
4856 delete_insns_since (last);
4859 /* For floating point formats without a sign bit, implement signbit
4860 as "ARG < 0.0". */
4861 bitpos = fmt->signbit_ro;
4862 if (bitpos < 0)
4864 /* But we can't do this if the format supports signed zero. */
4865 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4866 return NULL_RTX;
4868 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4869 build_real (TREE_TYPE (arg), dconst0));
4870 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
/* Otherwise extract the sign bit directly from the representation.
   Narrow values are reinterpreted as one integer; wider values are
   accessed one word at a time. */
4873 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4875 imode = int_mode_for_mode (fmode);
4876 if (imode == BLKmode)
4877 return NULL_RTX;
4878 temp = gen_lowpart (imode, temp);
4880 else
4882 imode = word_mode;
4883 /* Handle targets with different FP word orders. */
4884 if (FLOAT_WORDS_BIG_ENDIAN)
4885 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4886 else
4887 word = bitpos / BITS_PER_WORD;
4888 temp = operand_subword_force (temp, word, fmode);
4889 bitpos = bitpos % BITS_PER_WORD;
4892 /* Force the intermediate word_mode (or narrower) result into a
4893 register. This avoids attempting to create paradoxical SUBREGs
4894 of floating point modes below. */
4895 temp = force_reg (imode, temp);
4897 /* If the bitpos is within the "result mode" lowpart, the operation
4898 can be implement with a single bitwise AND. Otherwise, we need
4899 a right shift and an AND. */
4901 if (bitpos < GET_MODE_BITSIZE (rmode))
4903 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4905 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4906 temp = gen_lowpart (rmode, temp);
4907 temp = expand_binop (rmode, and_optab, temp,
4908 immed_wide_int_const (mask, rmode),
4909 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4911 else
4913 /* Perform a logical right shift to place the signbit in the least
4914 significant bit, then truncate the result to the desired mode
4915 and mask just this bit. */
4916 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4917 temp = gen_lowpart (rmode, temp);
4918 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4919 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4922 return temp;
4925 /* Expand fork or exec calls. TARGET is the desired target of the
4926 call. EXP is the call. FN is the
4927 identificator of the actual function. IGNORE is nonzero if the
4928 value is to be ignored. */
4930 static rtx
4931 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4933 tree id, decl;
4934 tree call;
4936 /* If we are not profiling, just call the function. */
4937 if (!profile_arc_flag)
4938 return NULL_RTX;
4940 /* Otherwise call the wrapper. This should be equivalent for the rest of
4941 compiler, so the code does not diverge, and the wrapper may run the
4942 code necessary for keeping the profiling sane. */
/* Map each builtin to its libgcov wrapper, which flushes/rebuilds
   profiling state around fork/exec. */
4944 switch (DECL_FUNCTION_CODE (fn))
4946 case BUILT_IN_FORK:
4947 id = get_identifier ("__gcov_fork");
4948 break;
4950 case BUILT_IN_EXECL:
4951 id = get_identifier ("__gcov_execl");
4952 break;
4954 case BUILT_IN_EXECV:
4955 id = get_identifier ("__gcov_execv");
4956 break;
4958 case BUILT_IN_EXECLP:
4959 id = get_identifier ("__gcov_execlp");
4960 break;
4962 case BUILT_IN_EXECLE:
4963 id = get_identifier ("__gcov_execle");
4964 break;
4966 case BUILT_IN_EXECVP:
4967 id = get_identifier ("__gcov_execvp");
4968 break;
4970 case BUILT_IN_EXECVE:
4971 id = get_identifier ("__gcov_execve");
4972 break;
4974 default:
4975 gcc_unreachable ();
/* Build an extern decl for the wrapper with the same type as the
   original function, then rewrite the call to target it. */
4978 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4979 FUNCTION_DECL, id, TREE_TYPE (fn));
4980 DECL_EXTERNAL (decl) = 1;
4981 TREE_PUBLIC (decl) = 1;
4982 DECL_ARTIFICIAL (decl) = 1;
4983 TREE_NOTHROW (decl) = 1;
4984 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4985 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4986 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4987 return expand_call (call, target, ignore);
4992 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4993 the pointer in these functions is void*, the tree optimizers may remove
4994 casts. The mode computed in expand_builtin isn't reliable either, due
4995 to __sync_bool_compare_and_swap.
4997 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4998 group of builtins. This gives us log2 of the mode size. */
5000 static inline enum machine_mode
5001 get_builtin_sync_mode (int fcode_diff)
5003 /* The size is not negotiable, so ask not to get BLKmode in return
5004 if the target indicates that a smaller size would be better. */
5005 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  MODE is the machine mode of the access.  */

static rtx
get_builtin_sync_mem (tree loc, enum machine_mode mode)
{
  rtx addr, mem;

  /* Expand the pointer in ptr_mode, then widen/convert to Pmode for use
     as an address.  */
  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  /* Mark the access volatile so it is not deleted or moved.  */
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
5033 /* Make sure an argument is in the right mode.
5034 EXP is the tree argument.
5035 MODE is the mode it should be in. */
5037 static rtx
5038 expand_expr_force_mode (tree exp, enum machine_mode mode)
5040 rtx val;
5041 enum machine_mode old_mode;
5043 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5044 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5045 of CONST_INTs, where we know the old_mode only from the call argument. */
5047 old_mode = GET_MODE (val);
5048 if (old_mode == VOIDmode)
5049 old_mode = TYPE_MODE (TREE_TYPE (exp));
5050 val = convert_modes (mode, old_mode, val, 1);
5051 return val;
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  /* __sync_fetch_and_nand changed semantics in GCC 4.4; warn once per
     compilation for each of the two builtin families.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* Function-local statics: each warning fires at most once per run.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* __sync operations always have sequentially-consistent semantics.  */
  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
				 after);
}
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  /* Request only the result the caller wants: the boolean success flag
     for the _bool form, the old value for the _val form.  A const0_rtx
     target means the result is ignored entirely.  */
  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
	pbool = &target;
      else
	poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
				       false, MEMMODEL_SEQ_CST,
				       MEMMODEL_SEQ_CST))
    return NULL_RTX;

  return target;
}
5151 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5152 general form is actually an atomic exchange, and some targets only
5153 support a reduced form with the second argument being a constant 1.
5154 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5155 the results. */
5157 static rtx
5158 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5159 rtx target)
5161 rtx val, mem;
5163 /* Expand the operands. */
5164 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5165 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5167 return expand_sync_lock_test_and_set (target, mem, val);
5170 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5172 static void
5173 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5175 rtx mem;
5177 /* Expand the operands. */
5178 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5180 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5183 /* Given an integer representing an ``enum memmodel'', verify its
5184 correctness and return the memory model enum. */
5186 static enum memmodel
5187 get_memmodel (tree exp)
5189 rtx op;
5190 unsigned HOST_WIDE_INT val;
5192 /* If the parameter is not a constant, it's a run time value so we'll just
5193 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5194 if (TREE_CODE (exp) != INTEGER_CST)
5195 return MEMMODEL_SEQ_CST;
5197 op = expand_normal (exp);
5199 val = INTVAL (op);
5200 if (targetm.memmodel_check)
5201 val = targetm.memmodel_check (val);
5202 else if (val & ~MEMMODEL_MASK)
5204 warning (OPT_Winvalid_memory_model,
5205 "Unknown architecture specifier in memory model to builtin.");
5206 return MEMMODEL_SEQ_CST;
5209 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5211 warning (OPT_Winvalid_memory_model,
5212 "invalid memory model argument to builtin");
5213 return MEMMODEL_SEQ_CST;
5216 return (enum memmodel) val;
/* Expand the __atomic_exchange intrinsic:
   	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  /* An exchange is a read-modify-write; consume ordering is not valid.  */
  if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
    {
      error ("invalid memory model for %<__atomic_exchange%>");
      return NULL_RTX;
    }

  /* NULL_RTX makes the caller fall back to a library call.  */
  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
/* Expand the __atomic_compare_exchange intrinsic:
   	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval, label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  /* The failure model may not include a store, so release/acq_rel are
     invalid there.  */
  if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
      || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      error ("invalid failure memory model for %<__atomic_compare_exchange%>");
      return NULL_RTX;
    }

  /* NOTE(review): this compares raw model values, relying on the enum
     ordering to reflect "strength" -- confirm that target-specific bits
     above MEMMODEL_MASK cannot distort this comparison.  */
  if (failure > success)
    {
      error ("failure memory model cannot be stronger than success "
	     "memory model for %<__atomic_compare_exchange%>");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* EXPECT is a pointer; build a MEM for the value it points to.  */
  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
/* Expand the __atomic_load intrinsic:
   	TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  /* A pure load cannot have release semantics.  */
  if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
      || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      error ("invalid memory model for %<__atomic_load%>");
      return NULL_RTX;
    }

  /* NULL_RTX makes the caller fall back to a library call.  */
  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}
/* Expand the __atomic_store intrinsic:
   	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (enum machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  /* A pure store only permits relaxed, release, or seq_cst ordering.  */
  if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
      && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
      && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
    {
      error ("invalid memory model for %<__atomic_store%>");
      return NULL_RTX;
    }

  /* NULL_RTX makes the caller fall back to a library call.  */
  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
/* Expand the __atomic_fetch_XXX intrinsic:
   	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  The library only provides
     the fetch_and_OP (not OP_and_fetch) entry points, so temporarily
     redirect the CALL_EXPR's callee to EXT_CALL.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  /* NAND correction: recompute (old & val) and invert it to turn
	     the fetched old value into the post-operation value.  */
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
/* Fallbacks for targets whose .md files do not define atomic_clear;
   the gen function must then never be reached.  */
#ifndef HAVE_atomic_clear
# define HAVE_atomic_clear 0
# define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
#endif

/* Expand an atomic clear operation.
	void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  enum machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  /* The object is a bool; derive its integer mode from BOOL_TYPE_SIZE.  */
  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  /* A clear is a store, so acquire-flavored models are invalid.  */
  if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
      || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      /* NOTE(review): the diagnostic names __atomic_store although this
	 expands __atomic_clear (the clear is implemented as a store);
	 confirm whether that wording is intentional.  */
      error ("invalid memory model for %<__atomic_store%>");
      return const0_rtx;
    }

  if (HAVE_atomic_clear)
    {
      emit_insn (gen_atomic_clear (mem, model));
      return const0_rtx;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
5494 /* Expand an atomic test_and_set operation.
5495 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5496 EXP is the call expression. */
5498 static rtx
5499 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5501 rtx mem;
5502 enum memmodel model;
5503 enum machine_mode mode;
5505 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5506 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5507 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5509 return expand_atomic_test_and_set (target, mem, model);
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  enum machine_mode mode;
  unsigned int mode_align, type_align;

  /* The size must be a compile-time constant to answer "always".  */
  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  mode = mode_for_size (size, MODE_INT, 0);
  mode_align = GET_MODE_ALIGNMENT (mode);

  /* A literal null object pointer means "use typical alignment".  */
  if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
    type_align = mode_align;
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype)))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  */
  if (can_compare_and_swap_p (mode, true))
    return boolean_true_node;
  else
    return boolean_false_node;
}
5566 /* Return true if the parameters to call EXP represent an object which will
5567 always generate lock free instructions. The first argument represents the
5568 size of the object, and the second parameter is a pointer to the object
5569 itself. If NULL is passed for the object, then the result is based on
5570 typical alignment for an object of the specified size. Otherwise return
5571 false. */
5573 static rtx
5574 expand_builtin_atomic_always_lock_free (tree exp)
5576 tree size;
5577 tree arg0 = CALL_EXPR_ARG (exp, 0);
5578 tree arg1 = CALL_EXPR_ARG (exp, 1);
5580 if (TREE_CODE (arg0) != INTEGER_CST)
5582 error ("non-constant argument 1 to __atomic_always_lock_free");
5583 return const0_rtx;
5586 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5587 if (size == boolean_true_node)
5588 return const1_rtx;
5589 return const0_rtx;
5592 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5593 is lock free on this architecture. */
5595 static tree
5596 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5598 if (!flag_inline_atomics)
5599 return NULL_TREE;
5601 /* If it isn't always lock free, don't generate a result. */
5602 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5603 return boolean_true_node;
5605 return NULL_TREE;
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  /* NULL_RTX makes the caller fall back to a library call.  */
  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}
5639 /* Expand the __atomic_thread_fence intrinsic:
5640 void __atomic_thread_fence (enum memmodel)
5641 EXP is the CALL_EXPR. */
5643 static void
5644 expand_builtin_atomic_thread_fence (tree exp)
5646 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5647 expand_mem_thread_fence (model);
5650 /* Expand the __atomic_signal_fence intrinsic:
5651 void __atomic_signal_fence (enum memmodel)
5652 EXP is the CALL_EXPR. */
5654 static void
5655 expand_builtin_atomic_signal_fence (tree exp)
5657 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5658 expand_mem_signal_fence (model);
5661 /* Expand the __sync_synchronize intrinsic. */
5663 static void
5664 expand_builtin_sync_synchronize (void)
5666 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
/* Expand __builtin_thread_pointer: return the thread pointer in Pmode via
   the target's get_thread_pointer optab, or error if unsupported.
   EXP is the CALL_EXPR; TARGET is an optional place for the result.  */

static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}
/* Expand __builtin_set_thread_pointer: store its pointer argument as the
   thread pointer via the target's set_thread_pointer optab, or error if
   unsupported.  EXP is the CALL_EXPR.  */

static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}
/* Emit code to restore the current value of stack.  VAR is the saved
   stack pointer value produced earlier by expand_stack_save.  */

static void
expand_stack_restore (tree var)
{
  rtx prev, sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  /* Record the insn boundary so the restore's effect on the outgoing
     argument area can be annotated with REG_ARGS_SIZE notes.  */
  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);
  fixup_args_size_notes (prev, get_last_insn (), 0);
}
5727 /* Emit code to save the current value of stack. */
5729 static rtx
5730 expand_stack_save (void)
5732 rtx ret = NULL_RTX;
5734 do_pending_stack_adjust ();
5735 emit_stack_save (SAVE_BLOCK, &ret);
5736 return ret;
5739 /* Expand an expression EXP that calls a built-in function,
5740 with result going to TARGET if that's convenient
5741 (and in mode MODE if that's convenient).
5742 SUBTARGET may be used as the target for computing one of EXP's operands.
5743 IGNORE is nonzero if the value is to be ignored. */
5746 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5747 int ignore)
5749 tree fndecl = get_callee_fndecl (exp);
5750 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5751 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5752 int flags;
5754 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5755 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5757 /* When not optimizing, generate calls to library functions for a certain
5758 set of builtins. */
5759 if (!optimize
5760 && !called_as_built_in (fndecl)
5761 && fcode != BUILT_IN_FORK
5762 && fcode != BUILT_IN_EXECL
5763 && fcode != BUILT_IN_EXECV
5764 && fcode != BUILT_IN_EXECLP
5765 && fcode != BUILT_IN_EXECLE
5766 && fcode != BUILT_IN_EXECVP
5767 && fcode != BUILT_IN_EXECVE
5768 && fcode != BUILT_IN_ALLOCA
5769 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5770 && fcode != BUILT_IN_FREE)
5771 return expand_call (exp, target, ignore);
5773 /* The built-in function expanders test for target == const0_rtx
5774 to determine whether the function's result will be ignored. */
5775 if (ignore)
5776 target = const0_rtx;
5778 /* If the result of a pure or const built-in function is ignored, and
5779 none of its arguments are volatile, we can avoid expanding the
5780 built-in call and just evaluate the arguments for side-effects. */
5781 if (target == const0_rtx
5782 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5783 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5785 bool volatilep = false;
5786 tree arg;
5787 call_expr_arg_iterator iter;
5789 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5790 if (TREE_THIS_VOLATILE (arg))
5792 volatilep = true;
5793 break;
5796 if (! volatilep)
5798 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5799 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5800 return const0_rtx;
5804 switch (fcode)
5806 CASE_FLT_FN (BUILT_IN_FABS):
5807 case BUILT_IN_FABSD32:
5808 case BUILT_IN_FABSD64:
5809 case BUILT_IN_FABSD128:
5810 target = expand_builtin_fabs (exp, target, subtarget);
5811 if (target)
5812 return target;
5813 break;
5815 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5816 target = expand_builtin_copysign (exp, target, subtarget);
5817 if (target)
5818 return target;
5819 break;
5821 /* Just do a normal library call if we were unable to fold
5822 the values. */
5823 CASE_FLT_FN (BUILT_IN_CABS):
5824 break;
5826 CASE_FLT_FN (BUILT_IN_EXP):
5827 CASE_FLT_FN (BUILT_IN_EXP10):
5828 CASE_FLT_FN (BUILT_IN_POW10):
5829 CASE_FLT_FN (BUILT_IN_EXP2):
5830 CASE_FLT_FN (BUILT_IN_EXPM1):
5831 CASE_FLT_FN (BUILT_IN_LOGB):
5832 CASE_FLT_FN (BUILT_IN_LOG):
5833 CASE_FLT_FN (BUILT_IN_LOG10):
5834 CASE_FLT_FN (BUILT_IN_LOG2):
5835 CASE_FLT_FN (BUILT_IN_LOG1P):
5836 CASE_FLT_FN (BUILT_IN_TAN):
5837 CASE_FLT_FN (BUILT_IN_ASIN):
5838 CASE_FLT_FN (BUILT_IN_ACOS):
5839 CASE_FLT_FN (BUILT_IN_ATAN):
5840 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5841 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5842 because of possible accuracy problems. */
5843 if (! flag_unsafe_math_optimizations)
5844 break;
5845 CASE_FLT_FN (BUILT_IN_SQRT):
5846 CASE_FLT_FN (BUILT_IN_FLOOR):
5847 CASE_FLT_FN (BUILT_IN_CEIL):
5848 CASE_FLT_FN (BUILT_IN_TRUNC):
5849 CASE_FLT_FN (BUILT_IN_ROUND):
5850 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5851 CASE_FLT_FN (BUILT_IN_RINT):
5852 target = expand_builtin_mathfn (exp, target, subtarget);
5853 if (target)
5854 return target;
5855 break;
5857 CASE_FLT_FN (BUILT_IN_FMA):
5858 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5859 if (target)
5860 return target;
5861 break;
5863 CASE_FLT_FN (BUILT_IN_ILOGB):
5864 if (! flag_unsafe_math_optimizations)
5865 break;
5866 CASE_FLT_FN (BUILT_IN_ISINF):
5867 CASE_FLT_FN (BUILT_IN_FINITE):
5868 case BUILT_IN_ISFINITE:
5869 case BUILT_IN_ISNORMAL:
5870 target = expand_builtin_interclass_mathfn (exp, target);
5871 if (target)
5872 return target;
5873 break;
5875 CASE_FLT_FN (BUILT_IN_ICEIL):
5876 CASE_FLT_FN (BUILT_IN_LCEIL):
5877 CASE_FLT_FN (BUILT_IN_LLCEIL):
5878 CASE_FLT_FN (BUILT_IN_LFLOOR):
5879 CASE_FLT_FN (BUILT_IN_IFLOOR):
5880 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5881 target = expand_builtin_int_roundingfn (exp, target);
5882 if (target)
5883 return target;
5884 break;
5886 CASE_FLT_FN (BUILT_IN_IRINT):
5887 CASE_FLT_FN (BUILT_IN_LRINT):
5888 CASE_FLT_FN (BUILT_IN_LLRINT):
5889 CASE_FLT_FN (BUILT_IN_IROUND):
5890 CASE_FLT_FN (BUILT_IN_LROUND):
5891 CASE_FLT_FN (BUILT_IN_LLROUND):
5892 target = expand_builtin_int_roundingfn_2 (exp, target);
5893 if (target)
5894 return target;
5895 break;
5897 CASE_FLT_FN (BUILT_IN_POWI):
5898 target = expand_builtin_powi (exp, target);
5899 if (target)
5900 return target;
5901 break;
5903 CASE_FLT_FN (BUILT_IN_ATAN2):
5904 CASE_FLT_FN (BUILT_IN_LDEXP):
5905 CASE_FLT_FN (BUILT_IN_SCALB):
5906 CASE_FLT_FN (BUILT_IN_SCALBN):
5907 CASE_FLT_FN (BUILT_IN_SCALBLN):
5908 if (! flag_unsafe_math_optimizations)
5909 break;
5911 CASE_FLT_FN (BUILT_IN_FMOD):
5912 CASE_FLT_FN (BUILT_IN_REMAINDER):
5913 CASE_FLT_FN (BUILT_IN_DREM):
5914 CASE_FLT_FN (BUILT_IN_POW):
5915 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5916 if (target)
5917 return target;
5918 break;
5920 CASE_FLT_FN (BUILT_IN_CEXPI):
5921 target = expand_builtin_cexpi (exp, target);
5922 gcc_assert (target);
5923 return target;
5925 CASE_FLT_FN (BUILT_IN_SIN):
5926 CASE_FLT_FN (BUILT_IN_COS):
5927 if (! flag_unsafe_math_optimizations)
5928 break;
5929 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5930 if (target)
5931 return target;
5932 break;
5934 CASE_FLT_FN (BUILT_IN_SINCOS):
5935 if (! flag_unsafe_math_optimizations)
5936 break;
5937 target = expand_builtin_sincos (exp);
5938 if (target)
5939 return target;
5940 break;
5942 case BUILT_IN_APPLY_ARGS:
5943 return expand_builtin_apply_args ();
5945 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5946 FUNCTION with a copy of the parameters described by
5947 ARGUMENTS, and ARGSIZE. It returns a block of memory
5948 allocated on the stack into which is stored all the registers
5949 that might possibly be used for returning the result of a
5950 function. ARGUMENTS is the value returned by
5951 __builtin_apply_args. ARGSIZE is the number of bytes of
5952 arguments that must be copied. ??? How should this value be
5953 computed? We'll also need a safe worst case value for varargs
5954 functions. */
5955 case BUILT_IN_APPLY:
5956 if (!validate_arglist (exp, POINTER_TYPE,
5957 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5958 && !validate_arglist (exp, REFERENCE_TYPE,
5959 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5960 return const0_rtx;
5961 else
5963 rtx ops[3];
5965 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5966 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5967 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5969 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5972 /* __builtin_return (RESULT) causes the function to return the
5973 value described by RESULT. RESULT is address of the block of
5974 memory returned by __builtin_apply. */
5975 case BUILT_IN_RETURN:
5976 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5977 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5978 return const0_rtx;
5980 case BUILT_IN_SAVEREGS:
5981 return expand_builtin_saveregs ();
5983 case BUILT_IN_VA_ARG_PACK:
5984 /* All valid uses of __builtin_va_arg_pack () are removed during
5985 inlining. */
5986 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5987 return const0_rtx;
5989 case BUILT_IN_VA_ARG_PACK_LEN:
5990 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5991 inlining. */
5992 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5993 return const0_rtx;
5995 /* Return the address of the first anonymous stack arg. */
5996 case BUILT_IN_NEXT_ARG:
5997 if (fold_builtin_next_arg (exp, false))
5998 return const0_rtx;
5999 return expand_builtin_next_arg ();
6001 case BUILT_IN_CLEAR_CACHE:
6002 target = expand_builtin___clear_cache (exp);
6003 if (target)
6004 return target;
6005 break;
6007 case BUILT_IN_CLASSIFY_TYPE:
6008 return expand_builtin_classify_type (exp);
6010 case BUILT_IN_CONSTANT_P:
6011 return const0_rtx;
6013 case BUILT_IN_FRAME_ADDRESS:
6014 case BUILT_IN_RETURN_ADDRESS:
6015 return expand_builtin_frame_address (fndecl, exp);
6017 /* Returns the address of the area where the structure is returned.
6018 0 otherwise. */
6019 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6020 if (call_expr_nargs (exp) != 0
6021 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6022 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6023 return const0_rtx;
6024 else
6025 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6027 case BUILT_IN_ALLOCA:
6028 case BUILT_IN_ALLOCA_WITH_ALIGN:
6029 /* If the allocation stems from the declaration of a variable-sized
6030 object, it cannot accumulate. */
6031 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6032 if (target)
6033 return target;
6034 break;
6036 case BUILT_IN_STACK_SAVE:
6037 return expand_stack_save ();
6039 case BUILT_IN_STACK_RESTORE:
6040 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6041 return const0_rtx;
6043 case BUILT_IN_BSWAP16:
6044 case BUILT_IN_BSWAP32:
6045 case BUILT_IN_BSWAP64:
6046 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6047 if (target)
6048 return target;
6049 break;
6051 CASE_INT_FN (BUILT_IN_FFS):
6052 target = expand_builtin_unop (target_mode, exp, target,
6053 subtarget, ffs_optab);
6054 if (target)
6055 return target;
6056 break;
6058 CASE_INT_FN (BUILT_IN_CLZ):
6059 target = expand_builtin_unop (target_mode, exp, target,
6060 subtarget, clz_optab);
6061 if (target)
6062 return target;
6063 break;
6065 CASE_INT_FN (BUILT_IN_CTZ):
6066 target = expand_builtin_unop (target_mode, exp, target,
6067 subtarget, ctz_optab);
6068 if (target)
6069 return target;
6070 break;
6072 CASE_INT_FN (BUILT_IN_CLRSB):
6073 target = expand_builtin_unop (target_mode, exp, target,
6074 subtarget, clrsb_optab);
6075 if (target)
6076 return target;
6077 break;
6079 CASE_INT_FN (BUILT_IN_POPCOUNT):
6080 target = expand_builtin_unop (target_mode, exp, target,
6081 subtarget, popcount_optab);
6082 if (target)
6083 return target;
6084 break;
6086 CASE_INT_FN (BUILT_IN_PARITY):
6087 target = expand_builtin_unop (target_mode, exp, target,
6088 subtarget, parity_optab);
6089 if (target)
6090 return target;
6091 break;
6093 case BUILT_IN_STRLEN:
6094 target = expand_builtin_strlen (exp, target, target_mode);
6095 if (target)
6096 return target;
6097 break;
6099 case BUILT_IN_STRCPY:
6100 target = expand_builtin_strcpy (exp, target);
6101 if (target)
6102 return target;
6103 break;
6105 case BUILT_IN_STRNCPY:
6106 target = expand_builtin_strncpy (exp, target);
6107 if (target)
6108 return target;
6109 break;
6111 case BUILT_IN_STPCPY:
6112 target = expand_builtin_stpcpy (exp, target, mode);
6113 if (target)
6114 return target;
6115 break;
6117 case BUILT_IN_MEMCPY:
6118 target = expand_builtin_memcpy (exp, target);
6119 if (target)
6120 return target;
6121 break;
6123 case BUILT_IN_MEMPCPY:
6124 target = expand_builtin_mempcpy (exp, target, mode);
6125 if (target)
6126 return target;
6127 break;
6129 case BUILT_IN_MEMSET:
6130 target = expand_builtin_memset (exp, target, mode);
6131 if (target)
6132 return target;
6133 break;
6135 case BUILT_IN_BZERO:
6136 target = expand_builtin_bzero (exp);
6137 if (target)
6138 return target;
6139 break;
6141 case BUILT_IN_STRCMP:
6142 target = expand_builtin_strcmp (exp, target);
6143 if (target)
6144 return target;
6145 break;
6147 case BUILT_IN_STRNCMP:
6148 target = expand_builtin_strncmp (exp, target, mode);
6149 if (target)
6150 return target;
6151 break;
6153 case BUILT_IN_BCMP:
6154 case BUILT_IN_MEMCMP:
6155 target = expand_builtin_memcmp (exp, target, mode);
6156 if (target)
6157 return target;
6158 break;
6160 case BUILT_IN_SETJMP:
6161 /* This should have been lowered to the builtins below. */
6162 gcc_unreachable ();
6164 case BUILT_IN_SETJMP_SETUP:
6165 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6166 and the receiver label. */
6167 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6169 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6170 VOIDmode, EXPAND_NORMAL);
6171 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6172 rtx label_r = label_rtx (label);
6174 /* This is copied from the handling of non-local gotos. */
6175 expand_builtin_setjmp_setup (buf_addr, label_r);
6176 nonlocal_goto_handler_labels
6177 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6178 nonlocal_goto_handler_labels);
6179 /* ??? Do not let expand_label treat us as such since we would
6180 not want to be both on the list of non-local labels and on
6181 the list of forced labels. */
6182 FORCED_LABEL (label) = 0;
6183 return const0_rtx;
6185 break;
6187 case BUILT_IN_SETJMP_RECEIVER:
6188 /* __builtin_setjmp_receiver is passed the receiver label. */
6189 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6191 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6192 rtx label_r = label_rtx (label);
6194 expand_builtin_setjmp_receiver (label_r);
6195 return const0_rtx;
6197 break;
6199 /* __builtin_longjmp is passed a pointer to an array of five words.
6200 It's similar to the C library longjmp function but works with
6201 __builtin_setjmp above. */
6202 case BUILT_IN_LONGJMP:
6203 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6205 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6206 VOIDmode, EXPAND_NORMAL);
6207 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6209 if (value != const1_rtx)
6211 error ("%<__builtin_longjmp%> second argument must be 1");
6212 return const0_rtx;
6215 expand_builtin_longjmp (buf_addr, value);
6216 return const0_rtx;
6218 break;
6220 case BUILT_IN_NONLOCAL_GOTO:
6221 target = expand_builtin_nonlocal_goto (exp);
6222 if (target)
6223 return target;
6224 break;
6226 /* This updates the setjmp buffer that is its argument with the value
6227 of the current stack pointer. */
6228 case BUILT_IN_UPDATE_SETJMP_BUF:
6229 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6231 rtx buf_addr
6232 = expand_normal (CALL_EXPR_ARG (exp, 0));
6234 expand_builtin_update_setjmp_buf (buf_addr);
6235 return const0_rtx;
6237 break;
6239 case BUILT_IN_TRAP:
6240 expand_builtin_trap ();
6241 return const0_rtx;
6243 case BUILT_IN_UNREACHABLE:
6244 expand_builtin_unreachable ();
6245 return const0_rtx;
6247 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6248 case BUILT_IN_SIGNBITD32:
6249 case BUILT_IN_SIGNBITD64:
6250 case BUILT_IN_SIGNBITD128:
6251 target = expand_builtin_signbit (exp, target);
6252 if (target)
6253 return target;
6254 break;
6256 /* Various hooks for the DWARF 2 __throw routine. */
6257 case BUILT_IN_UNWIND_INIT:
6258 expand_builtin_unwind_init ();
6259 return const0_rtx;
6260 case BUILT_IN_DWARF_CFA:
6261 return virtual_cfa_rtx;
6262 #ifdef DWARF2_UNWIND_INFO
6263 case BUILT_IN_DWARF_SP_COLUMN:
6264 return expand_builtin_dwarf_sp_column ();
6265 case BUILT_IN_INIT_DWARF_REG_SIZES:
6266 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6267 return const0_rtx;
6268 #endif
6269 case BUILT_IN_FROB_RETURN_ADDR:
6270 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6271 case BUILT_IN_EXTRACT_RETURN_ADDR:
6272 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6273 case BUILT_IN_EH_RETURN:
6274 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6275 CALL_EXPR_ARG (exp, 1));
6276 return const0_rtx;
6277 #ifdef EH_RETURN_DATA_REGNO
6278 case BUILT_IN_EH_RETURN_DATA_REGNO:
6279 return expand_builtin_eh_return_data_regno (exp);
6280 #endif
6281 case BUILT_IN_EXTEND_POINTER:
6282 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6283 case BUILT_IN_EH_POINTER:
6284 return expand_builtin_eh_pointer (exp);
6285 case BUILT_IN_EH_FILTER:
6286 return expand_builtin_eh_filter (exp);
6287 case BUILT_IN_EH_COPY_VALUES:
6288 return expand_builtin_eh_copy_values (exp);
6290 case BUILT_IN_VA_START:
6291 return expand_builtin_va_start (exp);
6292 case BUILT_IN_VA_END:
6293 return expand_builtin_va_end (exp);
6294 case BUILT_IN_VA_COPY:
6295 return expand_builtin_va_copy (exp);
6296 case BUILT_IN_EXPECT:
6297 return expand_builtin_expect (exp, target);
6298 case BUILT_IN_ASSUME_ALIGNED:
6299 return expand_builtin_assume_aligned (exp, target);
6300 case BUILT_IN_PREFETCH:
6301 expand_builtin_prefetch (exp);
6302 return const0_rtx;
6304 case BUILT_IN_INIT_TRAMPOLINE:
6305 return expand_builtin_init_trampoline (exp, true);
6306 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6307 return expand_builtin_init_trampoline (exp, false);
6308 case BUILT_IN_ADJUST_TRAMPOLINE:
6309 return expand_builtin_adjust_trampoline (exp);
6311 case BUILT_IN_FORK:
6312 case BUILT_IN_EXECL:
6313 case BUILT_IN_EXECV:
6314 case BUILT_IN_EXECLP:
6315 case BUILT_IN_EXECLE:
6316 case BUILT_IN_EXECVP:
6317 case BUILT_IN_EXECVE:
6318 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6319 if (target)
6320 return target;
6321 break;
6323 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6324 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6325 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6326 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6327 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6328 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6329 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6330 if (target)
6331 return target;
6332 break;
6334 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6335 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6336 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6337 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6338 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6339 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6340 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6341 if (target)
6342 return target;
6343 break;
6345 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6346 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6347 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6348 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6349 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6350 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6351 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6352 if (target)
6353 return target;
6354 break;
6356 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6357 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6358 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6359 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6360 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6361 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6362 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6363 if (target)
6364 return target;
6365 break;
6367 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6368 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6369 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6370 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6371 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6372 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6373 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6374 if (target)
6375 return target;
6376 break;
6378 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6379 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6380 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6381 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6382 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6383 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6384 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6385 if (target)
6386 return target;
6387 break;
6389 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6390 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6391 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6392 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6393 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6394 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6395 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6396 if (target)
6397 return target;
6398 break;
6400 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6401 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6402 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6403 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6404 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6405 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6406 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6407 if (target)
6408 return target;
6409 break;
6411 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6412 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6413 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6414 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6415 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6416 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6417 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6418 if (target)
6419 return target;
6420 break;
6422 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6423 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6424 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6425 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6426 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6427 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6428 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6429 if (target)
6430 return target;
6431 break;
6433 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6434 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6435 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6436 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6437 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6438 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6439 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6440 if (target)
6441 return target;
6442 break;
6444 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6445 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6446 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6447 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6448 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6449 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6450 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6451 if (target)
6452 return target;
6453 break;
6455 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6456 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6457 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6458 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6459 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6460 if (mode == VOIDmode)
6461 mode = TYPE_MODE (boolean_type_node);
6462 if (!target || !register_operand (target, mode))
6463 target = gen_reg_rtx (mode);
6465 mode = get_builtin_sync_mode
6466 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6467 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6468 if (target)
6469 return target;
6470 break;
6472 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6473 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6474 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6475 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6476 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6477 mode = get_builtin_sync_mode
6478 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6479 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6480 if (target)
6481 return target;
6482 break;
6484 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6485 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6486 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6487 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6488 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6489 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6490 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6491 if (target)
6492 return target;
6493 break;
6495 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6496 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6497 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6498 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6499 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6500 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6501 expand_builtin_sync_lock_release (mode, exp);
6502 return const0_rtx;
6504 case BUILT_IN_SYNC_SYNCHRONIZE:
6505 expand_builtin_sync_synchronize ();
6506 return const0_rtx;
6508 case BUILT_IN_ATOMIC_EXCHANGE_1:
6509 case BUILT_IN_ATOMIC_EXCHANGE_2:
6510 case BUILT_IN_ATOMIC_EXCHANGE_4:
6511 case BUILT_IN_ATOMIC_EXCHANGE_8:
6512 case BUILT_IN_ATOMIC_EXCHANGE_16:
6513 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6514 target = expand_builtin_atomic_exchange (mode, exp, target);
6515 if (target)
6516 return target;
6517 break;
6519 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6520 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6521 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6522 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6523 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6525 unsigned int nargs, z;
6526 vec<tree, va_gc> *vec;
6528 mode =
6529 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6530 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6531 if (target)
6532 return target;
6534 /* If this is turned into an external library call, the weak parameter
6535 must be dropped to match the expected parameter list. */
6536 nargs = call_expr_nargs (exp);
6537 vec_alloc (vec, nargs - 1);
6538 for (z = 0; z < 3; z++)
6539 vec->quick_push (CALL_EXPR_ARG (exp, z));
6540 /* Skip the boolean weak parameter. */
6541 for (z = 4; z < 6; z++)
6542 vec->quick_push (CALL_EXPR_ARG (exp, z));
6543 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6544 break;
6547 case BUILT_IN_ATOMIC_LOAD_1:
6548 case BUILT_IN_ATOMIC_LOAD_2:
6549 case BUILT_IN_ATOMIC_LOAD_4:
6550 case BUILT_IN_ATOMIC_LOAD_8:
6551 case BUILT_IN_ATOMIC_LOAD_16:
6552 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6553 target = expand_builtin_atomic_load (mode, exp, target);
6554 if (target)
6555 return target;
6556 break;
6558 case BUILT_IN_ATOMIC_STORE_1:
6559 case BUILT_IN_ATOMIC_STORE_2:
6560 case BUILT_IN_ATOMIC_STORE_4:
6561 case BUILT_IN_ATOMIC_STORE_8:
6562 case BUILT_IN_ATOMIC_STORE_16:
6563 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6564 target = expand_builtin_atomic_store (mode, exp);
6565 if (target)
6566 return const0_rtx;
6567 break;
6569 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6570 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6571 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6572 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6573 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6575 enum built_in_function lib;
6576 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6577 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6578 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6579 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6580 ignore, lib);
6581 if (target)
6582 return target;
6583 break;
6585 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6586 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6587 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6588 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6589 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6591 enum built_in_function lib;
6592 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6593 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6594 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6595 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6596 ignore, lib);
6597 if (target)
6598 return target;
6599 break;
6601 case BUILT_IN_ATOMIC_AND_FETCH_1:
6602 case BUILT_IN_ATOMIC_AND_FETCH_2:
6603 case BUILT_IN_ATOMIC_AND_FETCH_4:
6604 case BUILT_IN_ATOMIC_AND_FETCH_8:
6605 case BUILT_IN_ATOMIC_AND_FETCH_16:
6607 enum built_in_function lib;
6608 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6609 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6610 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6611 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6612 ignore, lib);
6613 if (target)
6614 return target;
6615 break;
6617 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6618 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6619 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6620 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6621 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6623 enum built_in_function lib;
6624 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6625 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6626 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6627 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6628 ignore, lib);
6629 if (target)
6630 return target;
6631 break;
6633 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6634 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6635 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6636 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6637 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6639 enum built_in_function lib;
6640 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6641 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6642 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6643 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6644 ignore, lib);
6645 if (target)
6646 return target;
6647 break;
6649 case BUILT_IN_ATOMIC_OR_FETCH_1:
6650 case BUILT_IN_ATOMIC_OR_FETCH_2:
6651 case BUILT_IN_ATOMIC_OR_FETCH_4:
6652 case BUILT_IN_ATOMIC_OR_FETCH_8:
6653 case BUILT_IN_ATOMIC_OR_FETCH_16:
6655 enum built_in_function lib;
6656 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6657 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6658 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6659 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6660 ignore, lib);
6661 if (target)
6662 return target;
6663 break;
6665 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6666 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6667 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6668 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6669 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6670 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6671 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6672 ignore, BUILT_IN_NONE);
6673 if (target)
6674 return target;
6675 break;
6677 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6678 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6679 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6680 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6681 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6682 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6683 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6684 ignore, BUILT_IN_NONE);
6685 if (target)
6686 return target;
6687 break;
6689 case BUILT_IN_ATOMIC_FETCH_AND_1:
6690 case BUILT_IN_ATOMIC_FETCH_AND_2:
6691 case BUILT_IN_ATOMIC_FETCH_AND_4:
6692 case BUILT_IN_ATOMIC_FETCH_AND_8:
6693 case BUILT_IN_ATOMIC_FETCH_AND_16:
6694 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6695 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6696 ignore, BUILT_IN_NONE);
6697 if (target)
6698 return target;
6699 break;
6701 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6702 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6703 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6704 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6705 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6706 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6707 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6708 ignore, BUILT_IN_NONE);
6709 if (target)
6710 return target;
6711 break;
6713 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6714 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6715 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6716 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6717 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6718 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6719 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6720 ignore, BUILT_IN_NONE);
6721 if (target)
6722 return target;
6723 break;
6725 case BUILT_IN_ATOMIC_FETCH_OR_1:
6726 case BUILT_IN_ATOMIC_FETCH_OR_2:
6727 case BUILT_IN_ATOMIC_FETCH_OR_4:
6728 case BUILT_IN_ATOMIC_FETCH_OR_8:
6729 case BUILT_IN_ATOMIC_FETCH_OR_16:
6730 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6731 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6732 ignore, BUILT_IN_NONE);
6733 if (target)
6734 return target;
6735 break;
6737 case BUILT_IN_ATOMIC_TEST_AND_SET:
6738 return expand_builtin_atomic_test_and_set (exp, target);
6740 case BUILT_IN_ATOMIC_CLEAR:
6741 return expand_builtin_atomic_clear (exp);
6743 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6744 return expand_builtin_atomic_always_lock_free (exp);
6746 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6747 target = expand_builtin_atomic_is_lock_free (exp);
6748 if (target)
6749 return target;
6750 break;
6752 case BUILT_IN_ATOMIC_THREAD_FENCE:
6753 expand_builtin_atomic_thread_fence (exp);
6754 return const0_rtx;
6756 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6757 expand_builtin_atomic_signal_fence (exp);
6758 return const0_rtx;
6760 case BUILT_IN_OBJECT_SIZE:
6761 return expand_builtin_object_size (exp);
6763 case BUILT_IN_MEMCPY_CHK:
6764 case BUILT_IN_MEMPCPY_CHK:
6765 case BUILT_IN_MEMMOVE_CHK:
6766 case BUILT_IN_MEMSET_CHK:
6767 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6768 if (target)
6769 return target;
6770 break;
6772 case BUILT_IN_STRCPY_CHK:
6773 case BUILT_IN_STPCPY_CHK:
6774 case BUILT_IN_STRNCPY_CHK:
6775 case BUILT_IN_STPNCPY_CHK:
6776 case BUILT_IN_STRCAT_CHK:
6777 case BUILT_IN_STRNCAT_CHK:
6778 case BUILT_IN_SNPRINTF_CHK:
6779 case BUILT_IN_VSNPRINTF_CHK:
6780 maybe_emit_chk_warning (exp, fcode);
6781 break;
6783 case BUILT_IN_SPRINTF_CHK:
6784 case BUILT_IN_VSPRINTF_CHK:
6785 maybe_emit_sprintf_chk_warning (exp, fcode);
6786 break;
6788 case BUILT_IN_FREE:
6789 if (warn_free_nonheap_object)
6790 maybe_emit_free_warning (exp);
6791 break;
6793 case BUILT_IN_THREAD_POINTER:
6794 return expand_builtin_thread_pointer (exp, target);
6796 case BUILT_IN_SET_THREAD_POINTER:
6797 expand_builtin_set_thread_pointer (exp);
6798 return const0_rtx;
6800 case BUILT_IN_CILK_DETACH:
6801 expand_builtin_cilk_detach (exp);
6802 return const0_rtx;
6804 case BUILT_IN_CILK_POP_FRAME:
6805 expand_builtin_cilk_pop_frame (exp);
6806 return const0_rtx;
6808 default: /* just do library call, if unknown builtin */
6809 break;
6812 /* The switch statement above can drop through to cause the function
6813 to be called normally. */
6814 return expand_call (exp, target, ignore);
6817 /* Determine whether a tree node represents a call to a built-in
6818 function. If the tree T is a call to a built-in function with
6819 the right number of arguments of the appropriate types, return
6820 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6821 Otherwise the return value is END_BUILTINS. */
6823 enum built_in_function
6824 builtin_mathfn_code (const_tree t)
6826 const_tree fndecl, arg, parmlist;
6827 const_tree argtype, parmtype;
6828 const_call_expr_arg_iterator iter;
6830 if (TREE_CODE (t) != CALL_EXPR
6831 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6832 return END_BUILTINS;
6834 fndecl = get_callee_fndecl (t);
6835 if (fndecl == NULL_TREE
6836 || TREE_CODE (fndecl) != FUNCTION_DECL
6837 || ! DECL_BUILT_IN (fndecl)
6838 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6839 return END_BUILTINS;
6841 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6842 init_const_call_expr_arg_iterator (t, &iter);
6843 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6845 /* If a function doesn't take a variable number of arguments,
6846 the last element in the list will have type `void'. */
6847 parmtype = TREE_VALUE (parmlist);
6848 if (VOID_TYPE_P (parmtype))
6850 if (more_const_call_expr_args_p (&iter))
6851 return END_BUILTINS;
6852 return DECL_FUNCTION_CODE (fndecl);
6855 if (! more_const_call_expr_args_p (&iter))
6856 return END_BUILTINS;
6858 arg = next_const_call_expr_arg (&iter);
6859 argtype = TREE_TYPE (arg);
6861 if (SCALAR_FLOAT_TYPE_P (parmtype))
6863 if (! SCALAR_FLOAT_TYPE_P (argtype))
6864 return END_BUILTINS;
6866 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6868 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6869 return END_BUILTINS;
6871 else if (POINTER_TYPE_P (parmtype))
6873 if (! POINTER_TYPE_P (argtype))
6874 return END_BUILTINS;
6876 else if (INTEGRAL_TYPE_P (parmtype))
6878 if (! INTEGRAL_TYPE_P (argtype))
6879 return END_BUILTINS;
6881 else
6882 return END_BUILTINS;
6885 /* Variable-length argument list. */
6886 return DECL_FUNCTION_CODE (fndecl);
6889 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6890 evaluate to a constant. */
6892 static tree
6893 fold_builtin_constant_p (tree arg)
6895 /* We return 1 for a numeric type that's known to be a constant
6896 value at compile-time or for an aggregate type that's a
6897 literal constant. */
6898 STRIP_NOPS (arg);
6900 /* If we know this is a constant, emit the constant of one. */
6901 if (CONSTANT_CLASS_P (arg)
6902 || (TREE_CODE (arg) == CONSTRUCTOR
6903 && TREE_CONSTANT (arg)))
6904 return integer_one_node;
6905 if (TREE_CODE (arg) == ADDR_EXPR)
6907 tree op = TREE_OPERAND (arg, 0);
6908 if (TREE_CODE (op) == STRING_CST
6909 || (TREE_CODE (op) == ARRAY_REF
6910 && integer_zerop (TREE_OPERAND (op, 1))
6911 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6912 return integer_one_node;
6915 /* If this expression has side effects, show we don't know it to be a
6916 constant. Likewise if it's a pointer or aggregate type since in
6917 those case we only want literals, since those are only optimized
6918 when generating RTL, not later.
6919 And finally, if we are compiling an initializer, not code, we
6920 need to return a definite result now; there's not going to be any
6921 more optimization done. */
6922 if (TREE_SIDE_EFFECTS (arg)
6923 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6924 || POINTER_TYPE_P (TREE_TYPE (arg))
6925 || cfun == 0
6926 || folding_initializer
6927 || force_folding_builtin_constant_p)
6928 return integer_zero_node;
6930 return NULL_TREE;
6933 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6934 return it as a truthvalue. */
6936 static tree
6937 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
6938 tree predictor)
6940 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6942 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6943 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6944 ret_type = TREE_TYPE (TREE_TYPE (fn));
6945 pred_type = TREE_VALUE (arg_types);
6946 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6948 pred = fold_convert_loc (loc, pred_type, pred);
6949 expected = fold_convert_loc (loc, expected_type, expected);
6950 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
6951 predictor);
6953 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6954 build_int_cst (ret_type, 0));
6957 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6958 NULL_TREE if no simplification is possible. */
6960 tree
6961 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
6963 tree inner, fndecl, inner_arg0;
6964 enum tree_code code;
6966 /* Distribute the expected value over short-circuiting operators.
6967 See through the cast from truthvalue_type_node to long. */
6968 inner_arg0 = arg0;
6969 while (TREE_CODE (inner_arg0) == NOP_EXPR
6970 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6971 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6972 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6974 /* If this is a builtin_expect within a builtin_expect keep the
6975 inner one. See through a comparison against a constant. It
6976 might have been added to create a thruthvalue. */
6977 inner = inner_arg0;
6979 if (COMPARISON_CLASS_P (inner)
6980 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6981 inner = TREE_OPERAND (inner, 0);
6983 if (TREE_CODE (inner) == CALL_EXPR
6984 && (fndecl = get_callee_fndecl (inner))
6985 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6986 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6987 return arg0;
6989 inner = inner_arg0;
6990 code = TREE_CODE (inner);
6991 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6993 tree op0 = TREE_OPERAND (inner, 0);
6994 tree op1 = TREE_OPERAND (inner, 1);
6996 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
6997 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
6998 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7000 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7003 /* If the argument isn't invariant then there's nothing else we can do. */
7004 if (!TREE_CONSTANT (inner_arg0))
7005 return NULL_TREE;
7007 /* If we expect that a comparison against the argument will fold to
7008 a constant return the constant. In practice, this means a true
7009 constant or the address of a non-weak symbol. */
7010 inner = inner_arg0;
7011 STRIP_NOPS (inner);
7012 if (TREE_CODE (inner) == ADDR_EXPR)
7016 inner = TREE_OPERAND (inner, 0);
7018 while (TREE_CODE (inner) == COMPONENT_REF
7019 || TREE_CODE (inner) == ARRAY_REF);
7020 if ((TREE_CODE (inner) == VAR_DECL
7021 || TREE_CODE (inner) == FUNCTION_DECL)
7022 && DECL_WEAK (inner))
7023 return NULL_TREE;
7026 /* Otherwise, ARG0 already has the proper type for the return value. */
7027 return arg0;
7030 /* Fold a call to __builtin_classify_type with argument ARG. */
7032 static tree
7033 fold_builtin_classify_type (tree arg)
7035 if (arg == 0)
7036 return build_int_cst (integer_type_node, no_type_class);
7038 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7041 /* Fold a call to __builtin_strlen with argument ARG. */
7043 static tree
7044 fold_builtin_strlen (location_t loc, tree type, tree arg)
7046 if (!validate_arg (arg, POINTER_TYPE))
7047 return NULL_TREE;
7048 else
7050 tree len = c_strlen (arg, 0);
7052 if (len)
7053 return fold_convert_loc (loc, type, len);
7055 return NULL_TREE;
7059 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7061 static tree
7062 fold_builtin_inf (location_t loc, tree type, int warn)
7064 REAL_VALUE_TYPE real;
7066 /* __builtin_inff is intended to be usable to define INFINITY on all
7067 targets. If an infinity is not available, INFINITY expands "to a
7068 positive constant of type float that overflows at translation
7069 time", footnote "In this case, using INFINITY will violate the
7070 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7071 Thus we pedwarn to ensure this constraint violation is
7072 diagnosed. */
7073 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7074 pedwarn (loc, 0, "target format does not support infinity");
7076 real_inf (&real);
7077 return build_real (type, real);
7080 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7082 static tree
7083 fold_builtin_nan (tree arg, tree type, int quiet)
7085 REAL_VALUE_TYPE real;
7086 const char *str;
7088 if (!validate_arg (arg, POINTER_TYPE))
7089 return NULL_TREE;
7090 str = c_getstr (arg);
7091 if (!str)
7092 return NULL_TREE;
7094 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7095 return NULL_TREE;
7097 return build_real (type, real);
7100 /* Return true if the floating point expression T has an integer value.
7101 We also allow +Inf, -Inf and NaN to be considered integer values. */
7103 static bool
7104 integer_valued_real_p (tree t)
7106 switch (TREE_CODE (t))
7108 case FLOAT_EXPR:
7109 return true;
7111 case ABS_EXPR:
7112 case SAVE_EXPR:
7113 return integer_valued_real_p (TREE_OPERAND (t, 0));
7115 case COMPOUND_EXPR:
7116 case MODIFY_EXPR:
7117 case BIND_EXPR:
7118 return integer_valued_real_p (TREE_OPERAND (t, 1));
7120 case PLUS_EXPR:
7121 case MINUS_EXPR:
7122 case MULT_EXPR:
7123 case MIN_EXPR:
7124 case MAX_EXPR:
7125 return integer_valued_real_p (TREE_OPERAND (t, 0))
7126 && integer_valued_real_p (TREE_OPERAND (t, 1));
7128 case COND_EXPR:
7129 return integer_valued_real_p (TREE_OPERAND (t, 1))
7130 && integer_valued_real_p (TREE_OPERAND (t, 2));
7132 case REAL_CST:
7133 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7135 case NOP_EXPR:
7137 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7138 if (TREE_CODE (type) == INTEGER_TYPE)
7139 return true;
7140 if (TREE_CODE (type) == REAL_TYPE)
7141 return integer_valued_real_p (TREE_OPERAND (t, 0));
7142 break;
7145 case CALL_EXPR:
7146 switch (builtin_mathfn_code (t))
7148 CASE_FLT_FN (BUILT_IN_CEIL):
7149 CASE_FLT_FN (BUILT_IN_FLOOR):
7150 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7151 CASE_FLT_FN (BUILT_IN_RINT):
7152 CASE_FLT_FN (BUILT_IN_ROUND):
7153 CASE_FLT_FN (BUILT_IN_TRUNC):
7154 return true;
7156 CASE_FLT_FN (BUILT_IN_FMIN):
7157 CASE_FLT_FN (BUILT_IN_FMAX):
7158 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7159 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7161 default:
7162 break;
7164 break;
7166 default:
7167 break;
7169 return false;
7172 /* FNDECL is assumed to be a builtin where truncation can be propagated
7173 across (for instance floor((double)f) == (double)floorf (f).
7174 Do the transformation for a call with argument ARG. */
7176 static tree
7177 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7179 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7181 if (!validate_arg (arg, REAL_TYPE))
7182 return NULL_TREE;
7184 /* Integer rounding functions are idempotent. */
7185 if (fcode == builtin_mathfn_code (arg))
7186 return arg;
7188 /* If argument is already integer valued, and we don't need to worry
7189 about setting errno, there's no need to perform rounding. */
7190 if (! flag_errno_math && integer_valued_real_p (arg))
7191 return arg;
7193 if (optimize)
7195 tree arg0 = strip_float_extensions (arg);
7196 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7197 tree newtype = TREE_TYPE (arg0);
7198 tree decl;
7200 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7201 && (decl = mathfn_built_in (newtype, fcode)))
7202 return fold_convert_loc (loc, ftype,
7203 build_call_expr_loc (loc, decl, 1,
7204 fold_convert_loc (loc,
7205 newtype,
7206 arg0)));
7208 return NULL_TREE;
7211 /* FNDECL is assumed to be builtin which can narrow the FP type of
7212 the argument, for instance lround((double)f) -> lroundf (f).
7213 Do the transformation for a call with argument ARG. */
7215 static tree
7216 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7218 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7220 if (!validate_arg (arg, REAL_TYPE))
7221 return NULL_TREE;
7223 /* If argument is already integer valued, and we don't need to worry
7224 about setting errno, there's no need to perform rounding. */
7225 if (! flag_errno_math && integer_valued_real_p (arg))
7226 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7227 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7229 if (optimize)
7231 tree ftype = TREE_TYPE (arg);
7232 tree arg0 = strip_float_extensions (arg);
7233 tree newtype = TREE_TYPE (arg0);
7234 tree decl;
7236 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7237 && (decl = mathfn_built_in (newtype, fcode)))
7238 return build_call_expr_loc (loc, decl, 1,
7239 fold_convert_loc (loc, newtype, arg0));
7242 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7243 sizeof (int) == sizeof (long). */
7244 if (TYPE_PRECISION (integer_type_node)
7245 == TYPE_PRECISION (long_integer_type_node))
7247 tree newfn = NULL_TREE;
7248 switch (fcode)
7250 CASE_FLT_FN (BUILT_IN_ICEIL):
7251 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7252 break;
7254 CASE_FLT_FN (BUILT_IN_IFLOOR):
7255 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7256 break;
7258 CASE_FLT_FN (BUILT_IN_IROUND):
7259 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7260 break;
7262 CASE_FLT_FN (BUILT_IN_IRINT):
7263 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7264 break;
7266 default:
7267 break;
7270 if (newfn)
7272 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7273 return fold_convert_loc (loc,
7274 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7278 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7279 sizeof (long long) == sizeof (long). */
7280 if (TYPE_PRECISION (long_long_integer_type_node)
7281 == TYPE_PRECISION (long_integer_type_node))
7283 tree newfn = NULL_TREE;
7284 switch (fcode)
7286 CASE_FLT_FN (BUILT_IN_LLCEIL):
7287 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7288 break;
7290 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7291 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7292 break;
7294 CASE_FLT_FN (BUILT_IN_LLROUND):
7295 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7296 break;
7298 CASE_FLT_FN (BUILT_IN_LLRINT):
7299 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7300 break;
7302 default:
7303 break;
7306 if (newfn)
7308 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7309 return fold_convert_loc (loc,
7310 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7314 return NULL_TREE;
7317 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7318 return type. Return NULL_TREE if no simplification can be made. */
7320 static tree
7321 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7323 tree res;
7325 if (!validate_arg (arg, COMPLEX_TYPE)
7326 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7327 return NULL_TREE;
7329 /* Calculate the result when the argument is a constant. */
7330 if (TREE_CODE (arg) == COMPLEX_CST
7331 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7332 type, mpfr_hypot)))
7333 return res;
7335 if (TREE_CODE (arg) == COMPLEX_EXPR)
7337 tree real = TREE_OPERAND (arg, 0);
7338 tree imag = TREE_OPERAND (arg, 1);
7340 /* If either part is zero, cabs is fabs of the other. */
7341 if (real_zerop (real))
7342 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7343 if (real_zerop (imag))
7344 return fold_build1_loc (loc, ABS_EXPR, type, real);
7346 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7347 if (flag_unsafe_math_optimizations
7348 && operand_equal_p (real, imag, OEP_PURE_SAME))
7350 const REAL_VALUE_TYPE sqrt2_trunc
7351 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7352 STRIP_NOPS (real);
7353 return fold_build2_loc (loc, MULT_EXPR, type,
7354 fold_build1_loc (loc, ABS_EXPR, type, real),
7355 build_real (type, sqrt2_trunc));
7359 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7360 if (TREE_CODE (arg) == NEGATE_EXPR
7361 || TREE_CODE (arg) == CONJ_EXPR)
7362 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7364 /* Don't do this when optimizing for size. */
7365 if (flag_unsafe_math_optimizations
7366 && optimize && optimize_function_for_speed_p (cfun))
7368 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7370 if (sqrtfn != NULL_TREE)
7372 tree rpart, ipart, result;
7374 arg = builtin_save_expr (arg);
7376 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7377 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7379 rpart = builtin_save_expr (rpart);
7380 ipart = builtin_save_expr (ipart);
7382 result = fold_build2_loc (loc, PLUS_EXPR, type,
7383 fold_build2_loc (loc, MULT_EXPR, type,
7384 rpart, rpart),
7385 fold_build2_loc (loc, MULT_EXPR, type,
7386 ipart, ipart));
7388 return build_call_expr_loc (loc, sqrtfn, 1, result);
7392 return NULL_TREE;
7395 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7396 complex tree type of the result. If NEG is true, the imaginary
7397 zero is negative. */
7399 static tree
7400 build_complex_cproj (tree type, bool neg)
7402 REAL_VALUE_TYPE rinf, rzero = dconst0;
7404 real_inf (&rinf);
7405 rzero.sign = neg;
7406 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7407 build_real (TREE_TYPE (type), rzero));
7410 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7411 return type. Return NULL_TREE if no simplification can be made. */
7413 static tree
7414 fold_builtin_cproj (location_t loc, tree arg, tree type)
7416 if (!validate_arg (arg, COMPLEX_TYPE)
7417 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7418 return NULL_TREE;
7420 /* If there are no infinities, return arg. */
7421 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7422 return non_lvalue_loc (loc, arg);
7424 /* Calculate the result when the argument is a constant. */
7425 if (TREE_CODE (arg) == COMPLEX_CST)
7427 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7428 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7430 if (real_isinf (real) || real_isinf (imag))
7431 return build_complex_cproj (type, imag->sign);
7432 else
7433 return arg;
7435 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7437 tree real = TREE_OPERAND (arg, 0);
7438 tree imag = TREE_OPERAND (arg, 1);
7440 STRIP_NOPS (real);
7441 STRIP_NOPS (imag);
7443 /* If the real part is inf and the imag part is known to be
7444 nonnegative, return (inf + 0i). Remember side-effects are
7445 possible in the imag part. */
7446 if (TREE_CODE (real) == REAL_CST
7447 && real_isinf (TREE_REAL_CST_PTR (real))
7448 && tree_expr_nonnegative_p (imag))
7449 return omit_one_operand_loc (loc, type,
7450 build_complex_cproj (type, false),
7451 arg);
7453 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7454 Remember side-effects are possible in the real part. */
7455 if (TREE_CODE (imag) == REAL_CST
7456 && real_isinf (TREE_REAL_CST_PTR (imag)))
7457 return
7458 omit_one_operand_loc (loc, type,
7459 build_complex_cproj (type, TREE_REAL_CST_PTR
7460 (imag)->sign), arg);
7463 return NULL_TREE;
7466 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7467 Return NULL_TREE if no simplification can be made. */
7469 static tree
7470 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7473 enum built_in_function fcode;
7474 tree res;
7476 if (!validate_arg (arg, REAL_TYPE))
7477 return NULL_TREE;
7479 /* Calculate the result when the argument is a constant. */
7480 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7481 return res;
7483 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7484 fcode = builtin_mathfn_code (arg);
7485 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7487 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7488 arg = fold_build2_loc (loc, MULT_EXPR, type,
7489 CALL_EXPR_ARG (arg, 0),
7490 build_real (type, dconsthalf));
7491 return build_call_expr_loc (loc, expfn, 1, arg);
7494 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7495 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7497 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7499 if (powfn)
7501 tree arg0 = CALL_EXPR_ARG (arg, 0);
7502 tree tree_root;
7503 /* The inner root was either sqrt or cbrt. */
7504 /* This was a conditional expression but it triggered a bug
7505 in Sun C 5.5. */
7506 REAL_VALUE_TYPE dconstroot;
7507 if (BUILTIN_SQRT_P (fcode))
7508 dconstroot = dconsthalf;
7509 else
7510 dconstroot = dconst_third ();
7512 /* Adjust for the outer root. */
7513 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7514 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7515 tree_root = build_real (type, dconstroot);
7516 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7520 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7521 if (flag_unsafe_math_optimizations
7522 && (fcode == BUILT_IN_POW
7523 || fcode == BUILT_IN_POWF
7524 || fcode == BUILT_IN_POWL))
7526 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7527 tree arg0 = CALL_EXPR_ARG (arg, 0);
7528 tree arg1 = CALL_EXPR_ARG (arg, 1);
7529 tree narg1;
7530 if (!tree_expr_nonnegative_p (arg0))
7531 arg0 = build1 (ABS_EXPR, type, arg0);
7532 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7533 build_real (type, dconsthalf));
7534 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7537 return NULL_TREE;
7540 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7541 Return NULL_TREE if no simplification can be made. */
7543 static tree
7544 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7546 const enum built_in_function fcode = builtin_mathfn_code (arg);
7547 tree res;
7549 if (!validate_arg (arg, REAL_TYPE))
7550 return NULL_TREE;
7552 /* Calculate the result when the argument is a constant. */
7553 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7554 return res;
7556 if (flag_unsafe_math_optimizations)
7558 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7559 if (BUILTIN_EXPONENT_P (fcode))
7561 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7562 const REAL_VALUE_TYPE third_trunc =
7563 real_value_truncate (TYPE_MODE (type), dconst_third ());
7564 arg = fold_build2_loc (loc, MULT_EXPR, type,
7565 CALL_EXPR_ARG (arg, 0),
7566 build_real (type, third_trunc));
7567 return build_call_expr_loc (loc, expfn, 1, arg);
7570 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7571 if (BUILTIN_SQRT_P (fcode))
7573 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7575 if (powfn)
7577 tree arg0 = CALL_EXPR_ARG (arg, 0);
7578 tree tree_root;
7579 REAL_VALUE_TYPE dconstroot = dconst_third ();
7581 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7582 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7583 tree_root = build_real (type, dconstroot);
7584 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7588 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7589 if (BUILTIN_CBRT_P (fcode))
7591 tree arg0 = CALL_EXPR_ARG (arg, 0);
7592 if (tree_expr_nonnegative_p (arg0))
7594 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7596 if (powfn)
7598 tree tree_root;
7599 REAL_VALUE_TYPE dconstroot;
7601 real_arithmetic (&dconstroot, MULT_EXPR,
7602 dconst_third_ptr (), dconst_third_ptr ());
7603 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7604 tree_root = build_real (type, dconstroot);
7605 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7610 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7611 if (fcode == BUILT_IN_POW
7612 || fcode == BUILT_IN_POWF
7613 || fcode == BUILT_IN_POWL)
7615 tree arg00 = CALL_EXPR_ARG (arg, 0);
7616 tree arg01 = CALL_EXPR_ARG (arg, 1);
7617 if (tree_expr_nonnegative_p (arg00))
7619 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7620 const REAL_VALUE_TYPE dconstroot
7621 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7622 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7623 build_real (type, dconstroot));
7624 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7628 return NULL_TREE;
7631 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7632 TYPE is the type of the return value. Return NULL_TREE if no
7633 simplification can be made. */
7635 static tree
7636 fold_builtin_cos (location_t loc,
7637 tree arg, tree type, tree fndecl)
7639 tree res, narg;
7641 if (!validate_arg (arg, REAL_TYPE))
7642 return NULL_TREE;
7644 /* Calculate the result when the argument is a constant. */
7645 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7646 return res;
7648 /* Optimize cos(-x) into cos (x). */
7649 if ((narg = fold_strip_sign_ops (arg)))
7650 return build_call_expr_loc (loc, fndecl, 1, narg);
7652 return NULL_TREE;
7655 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7656 Return NULL_TREE if no simplification can be made. */
7658 static tree
7659 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7661 if (validate_arg (arg, REAL_TYPE))
7663 tree res, narg;
7665 /* Calculate the result when the argument is a constant. */
7666 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7667 return res;
7669 /* Optimize cosh(-x) into cosh (x). */
7670 if ((narg = fold_strip_sign_ops (arg)))
7671 return build_call_expr_loc (loc, fndecl, 1, narg);
7674 return NULL_TREE;
7677 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7678 argument ARG. TYPE is the type of the return value. Return
7679 NULL_TREE if no simplification can be made. */
7681 static tree
7682 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7683 bool hyper)
7685 if (validate_arg (arg, COMPLEX_TYPE)
7686 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7688 tree tmp;
7690 /* Calculate the result when the argument is a constant. */
7691 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7692 return tmp;
7694 /* Optimize fn(-x) into fn(x). */
7695 if ((tmp = fold_strip_sign_ops (arg)))
7696 return build_call_expr_loc (loc, fndecl, 1, tmp);
7699 return NULL_TREE;
7702 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7703 Return NULL_TREE if no simplification can be made. */
7705 static tree
7706 fold_builtin_tan (tree arg, tree type)
7708 enum built_in_function fcode;
7709 tree res;
7711 if (!validate_arg (arg, REAL_TYPE))
7712 return NULL_TREE;
7714 /* Calculate the result when the argument is a constant. */
7715 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7716 return res;
7718 /* Optimize tan(atan(x)) = x. */
7719 fcode = builtin_mathfn_code (arg);
7720 if (flag_unsafe_math_optimizations
7721 && (fcode == BUILT_IN_ATAN
7722 || fcode == BUILT_IN_ATANF
7723 || fcode == BUILT_IN_ATANL))
7724 return CALL_EXPR_ARG (arg, 0);
7726 return NULL_TREE;
7729 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7730 NULL_TREE if no simplification can be made. */
7732 static tree
7733 fold_builtin_sincos (location_t loc,
7734 tree arg0, tree arg1, tree arg2)
7736 tree type;
7737 tree res, fn, call;
7739 if (!validate_arg (arg0, REAL_TYPE)
7740 || !validate_arg (arg1, POINTER_TYPE)
7741 || !validate_arg (arg2, POINTER_TYPE))
7742 return NULL_TREE;
7744 type = TREE_TYPE (arg0);
7746 /* Calculate the result when the argument is a constant. */
7747 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7748 return res;
7750 /* Canonicalize sincos to cexpi. */
7751 if (!targetm.libc_has_function (function_c99_math_complex))
7752 return NULL_TREE;
7753 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7754 if (!fn)
7755 return NULL_TREE;
7757 call = build_call_expr_loc (loc, fn, 1, arg0);
7758 call = builtin_save_expr (call);
7760 return build2 (COMPOUND_EXPR, void_type_node,
7761 build2 (MODIFY_EXPR, void_type_node,
7762 build_fold_indirect_ref_loc (loc, arg1),
7763 build1 (IMAGPART_EXPR, type, call)),
7764 build2 (MODIFY_EXPR, void_type_node,
7765 build_fold_indirect_ref_loc (loc, arg2),
7766 build1 (REALPART_EXPR, type, call)));
7769 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7770 NULL_TREE if no simplification can be made. */
7772 static tree
7773 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7775 tree rtype;
7776 tree realp, imagp, ifn;
7777 tree res;
7779 if (!validate_arg (arg0, COMPLEX_TYPE)
7780 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7781 return NULL_TREE;
7783 /* Calculate the result when the argument is a constant. */
7784 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7785 return res;
7787 rtype = TREE_TYPE (TREE_TYPE (arg0));
7789 /* In case we can figure out the real part of arg0 and it is constant zero
7790 fold to cexpi. */
7791 if (!targetm.libc_has_function (function_c99_math_complex))
7792 return NULL_TREE;
7793 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7794 if (!ifn)
7795 return NULL_TREE;
7797 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7798 && real_zerop (realp))
7800 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7801 return build_call_expr_loc (loc, ifn, 1, narg);
7804 /* In case we can easily decompose real and imaginary parts split cexp
7805 to exp (r) * cexpi (i). */
7806 if (flag_unsafe_math_optimizations
7807 && realp)
7809 tree rfn, rcall, icall;
7811 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7812 if (!rfn)
7813 return NULL_TREE;
7815 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7816 if (!imagp)
7817 return NULL_TREE;
7819 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7820 icall = builtin_save_expr (icall);
7821 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7822 rcall = builtin_save_expr (rcall);
7823 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7824 fold_build2_loc (loc, MULT_EXPR, rtype,
7825 rcall,
7826 fold_build1_loc (loc, REALPART_EXPR,
7827 rtype, icall)),
7828 fold_build2_loc (loc, MULT_EXPR, rtype,
7829 rcall,
7830 fold_build1_loc (loc, IMAGPART_EXPR,
7831 rtype, icall)));
7834 return NULL_TREE;
7837 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7838 Return NULL_TREE if no simplification can be made. */
7840 static tree
7841 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7843 if (!validate_arg (arg, REAL_TYPE))
7844 return NULL_TREE;
7846 /* Optimize trunc of constant value. */
7847 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7849 REAL_VALUE_TYPE r, x;
7850 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7852 x = TREE_REAL_CST (arg);
7853 real_trunc (&r, TYPE_MODE (type), &x);
7854 return build_real (type, r);
7857 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7860 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7861 Return NULL_TREE if no simplification can be made. */
7863 static tree
7864 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7866 if (!validate_arg (arg, REAL_TYPE))
7867 return NULL_TREE;
7869 /* Optimize floor of constant value. */
7870 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7872 REAL_VALUE_TYPE x;
7874 x = TREE_REAL_CST (arg);
7875 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7877 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7878 REAL_VALUE_TYPE r;
7880 real_floor (&r, TYPE_MODE (type), &x);
7881 return build_real (type, r);
7885 /* Fold floor (x) where x is nonnegative to trunc (x). */
7886 if (tree_expr_nonnegative_p (arg))
7888 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7889 if (truncfn)
7890 return build_call_expr_loc (loc, truncfn, 1, arg);
7893 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7896 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7897 Return NULL_TREE if no simplification can be made. */
7899 static tree
7900 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7902 if (!validate_arg (arg, REAL_TYPE))
7903 return NULL_TREE;
7905 /* Optimize ceil of constant value. */
7906 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7908 REAL_VALUE_TYPE x;
7910 x = TREE_REAL_CST (arg);
7911 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7913 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7914 REAL_VALUE_TYPE r;
7916 real_ceil (&r, TYPE_MODE (type), &x);
7917 return build_real (type, r);
7921 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7924 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7925 Return NULL_TREE if no simplification can be made. */
7927 static tree
7928 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7930 if (!validate_arg (arg, REAL_TYPE))
7931 return NULL_TREE;
7933 /* Optimize round of constant value. */
7934 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7936 REAL_VALUE_TYPE x;
7938 x = TREE_REAL_CST (arg);
7939 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7941 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7942 REAL_VALUE_TYPE r;
7944 real_round (&r, TYPE_MODE (type), &x);
7945 return build_real (type, r);
7949 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7952 /* Fold function call to builtin lround, lroundf or lroundl (or the
7953 corresponding long long versions) and other rounding functions. ARG
7954 is the argument to the call. Return NULL_TREE if no simplification
7955 can be made. */
7957 static tree
7958 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7960 if (!validate_arg (arg, REAL_TYPE))
7961 return NULL_TREE;
7963 /* Optimize lround of constant value. */
7964 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7966 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7968 if (real_isfinite (&x))
7970 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7971 tree ftype = TREE_TYPE (arg);
7972 REAL_VALUE_TYPE r;
7973 bool fail = false;
7975 switch (DECL_FUNCTION_CODE (fndecl))
7977 CASE_FLT_FN (BUILT_IN_IFLOOR):
7978 CASE_FLT_FN (BUILT_IN_LFLOOR):
7979 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7980 real_floor (&r, TYPE_MODE (ftype), &x);
7981 break;
7983 CASE_FLT_FN (BUILT_IN_ICEIL):
7984 CASE_FLT_FN (BUILT_IN_LCEIL):
7985 CASE_FLT_FN (BUILT_IN_LLCEIL):
7986 real_ceil (&r, TYPE_MODE (ftype), &x);
7987 break;
7989 CASE_FLT_FN (BUILT_IN_IROUND):
7990 CASE_FLT_FN (BUILT_IN_LROUND):
7991 CASE_FLT_FN (BUILT_IN_LLROUND):
7992 real_round (&r, TYPE_MODE (ftype), &x);
7993 break;
7995 default:
7996 gcc_unreachable ();
7999 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8000 if (!fail)
8001 return wide_int_to_tree (itype, val);
8005 switch (DECL_FUNCTION_CODE (fndecl))
8007 CASE_FLT_FN (BUILT_IN_LFLOOR):
8008 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8009 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8010 if (tree_expr_nonnegative_p (arg))
8011 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8012 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8013 break;
8014 default:;
8017 return fold_fixed_mathfn (loc, fndecl, arg);
8020 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8021 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8022 the argument to the call. Return NULL_TREE if no simplification can
8023 be made. */
8025 static tree
8026 fold_builtin_bitop (tree fndecl, tree arg)
8028 if (!validate_arg (arg, INTEGER_TYPE))
8029 return NULL_TREE;
8031 /* Optimize for constant argument. */
8032 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8034 tree type = TREE_TYPE (arg);
8035 int result;
8037 switch (DECL_FUNCTION_CODE (fndecl))
8039 CASE_INT_FN (BUILT_IN_FFS):
8040 result = wi::ffs (arg);
8041 break;
8043 CASE_INT_FN (BUILT_IN_CLZ):
8044 if (wi::ne_p (arg, 0))
8045 result = wi::clz (arg);
8046 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8047 result = TYPE_PRECISION (type);
8048 break;
8050 CASE_INT_FN (BUILT_IN_CTZ):
8051 if (wi::ne_p (arg, 0))
8052 result = wi::ctz (arg);
8053 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8054 result = TYPE_PRECISION (type);
8055 break;
8057 CASE_INT_FN (BUILT_IN_CLRSB):
8058 result = wi::clrsb (arg);
8059 break;
8061 CASE_INT_FN (BUILT_IN_POPCOUNT):
8062 result = wi::popcount (arg);
8063 break;
8065 CASE_INT_FN (BUILT_IN_PARITY):
8066 result = wi::parity (arg);
8067 break;
8069 default:
8070 gcc_unreachable ();
8073 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8076 return NULL_TREE;
8079 /* Fold function call to builtin_bswap and the short, long and long long
8080 variants. Return NULL_TREE if no simplification can be made. */
8081 static tree
8082 fold_builtin_bswap (tree fndecl, tree arg)
8084 if (! validate_arg (arg, INTEGER_TYPE))
8085 return NULL_TREE;
8087 /* Optimize constant value. */
8088 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8090 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8092 switch (DECL_FUNCTION_CODE (fndecl))
8094 case BUILT_IN_BSWAP16:
8095 case BUILT_IN_BSWAP32:
8096 case BUILT_IN_BSWAP64:
8098 signop sgn = TYPE_SIGN (type);
8099 tree result =
8100 wide_int_to_tree (type,
8101 wide_int::from (arg, TYPE_PRECISION (type),
8102 sgn).bswap ());
8103 return result;
8105 default:
8106 gcc_unreachable ();
8110 return NULL_TREE;
8113 /* A subroutine of fold_builtin to fold the various logarithmic
8114 functions. Return NULL_TREE if no simplification can me made.
8115 FUNC is the corresponding MPFR logarithm function. */
8117 static tree
8118 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8119 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8121 if (validate_arg (arg, REAL_TYPE))
8123 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8124 tree res;
8125 const enum built_in_function fcode = builtin_mathfn_code (arg);
8127 /* Calculate the result when the argument is a constant. */
8128 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8129 return res;
8131 /* Special case, optimize logN(expN(x)) = x. */
8132 if (flag_unsafe_math_optimizations
8133 && ((func == mpfr_log
8134 && (fcode == BUILT_IN_EXP
8135 || fcode == BUILT_IN_EXPF
8136 || fcode == BUILT_IN_EXPL))
8137 || (func == mpfr_log2
8138 && (fcode == BUILT_IN_EXP2
8139 || fcode == BUILT_IN_EXP2F
8140 || fcode == BUILT_IN_EXP2L))
8141 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8142 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8144 /* Optimize logN(func()) for various exponential functions. We
8145 want to determine the value "x" and the power "exponent" in
8146 order to transform logN(x**exponent) into exponent*logN(x). */
8147 if (flag_unsafe_math_optimizations)
8149 tree exponent = 0, x = 0;
8151 switch (fcode)
8153 CASE_FLT_FN (BUILT_IN_EXP):
8154 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8155 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8156 dconst_e ()));
8157 exponent = CALL_EXPR_ARG (arg, 0);
8158 break;
8159 CASE_FLT_FN (BUILT_IN_EXP2):
8160 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8161 x = build_real (type, dconst2);
8162 exponent = CALL_EXPR_ARG (arg, 0);
8163 break;
8164 CASE_FLT_FN (BUILT_IN_EXP10):
8165 CASE_FLT_FN (BUILT_IN_POW10):
8166 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8168 REAL_VALUE_TYPE dconst10;
8169 real_from_integer (&dconst10, VOIDmode, 10, SIGNED);
8170 x = build_real (type, dconst10);
8172 exponent = CALL_EXPR_ARG (arg, 0);
8173 break;
8174 CASE_FLT_FN (BUILT_IN_SQRT):
8175 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8176 x = CALL_EXPR_ARG (arg, 0);
8177 exponent = build_real (type, dconsthalf);
8178 break;
8179 CASE_FLT_FN (BUILT_IN_CBRT):
8180 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8181 x = CALL_EXPR_ARG (arg, 0);
8182 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8183 dconst_third ()));
8184 break;
8185 CASE_FLT_FN (BUILT_IN_POW):
8186 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8187 x = CALL_EXPR_ARG (arg, 0);
8188 exponent = CALL_EXPR_ARG (arg, 1);
8189 break;
8190 default:
8191 break;
8194 /* Now perform the optimization. */
8195 if (x && exponent)
8197 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8198 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8203 return NULL_TREE;
8206 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8207 NULL_TREE if no simplification can be made. */
8209 static tree
8210 fold_builtin_hypot (location_t loc, tree fndecl,
8211 tree arg0, tree arg1, tree type)
8213 tree res, narg0, narg1;
8215 if (!validate_arg (arg0, REAL_TYPE)
8216 || !validate_arg (arg1, REAL_TYPE))
8217 return NULL_TREE;
8219 /* Calculate the result when the argument is a constant. */
8220 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8221 return res;
8223 /* If either argument to hypot has a negate or abs, strip that off.
8224 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8225 narg0 = fold_strip_sign_ops (arg0);
8226 narg1 = fold_strip_sign_ops (arg1);
8227 if (narg0 || narg1)
8229 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8230 narg1 ? narg1 : arg1);
8233 /* If either argument is zero, hypot is fabs of the other. */
8234 if (real_zerop (arg0))
8235 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8236 else if (real_zerop (arg1))
8237 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8239 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8240 if (flag_unsafe_math_optimizations
8241 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8243 const REAL_VALUE_TYPE sqrt2_trunc
8244 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8245 return fold_build2_loc (loc, MULT_EXPR, type,
8246 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8247 build_real (type, sqrt2_trunc));
8250 return NULL_TREE;
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  The transforms are
   tried in order from exact (always valid) to unsafe (guarded by
   flag_unsafe_math_optimizations).  */

static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent: round-trip C through a
	 HOST_WIDE_INT and see if the value survives unchanged.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, SIGNED);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      /* An inexact result is only usable under unsafe math.  */
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
8406 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8407 Return NULL_TREE if no simplification can be made. */
8408 static tree
8409 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8410 tree arg0, tree arg1, tree type)
8412 if (!validate_arg (arg0, REAL_TYPE)
8413 || !validate_arg (arg1, INTEGER_TYPE))
8414 return NULL_TREE;
8416 /* Optimize pow(1.0,y) = 1.0. */
8417 if (real_onep (arg0))
8418 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8420 if (tree_fits_shwi_p (arg1))
8422 HOST_WIDE_INT c = tree_to_shwi (arg1);
8424 /* Evaluate powi at compile-time. */
8425 if (TREE_CODE (arg0) == REAL_CST
8426 && !TREE_OVERFLOW (arg0))
8428 REAL_VALUE_TYPE x;
8429 x = TREE_REAL_CST (arg0);
8430 real_powi (&x, TYPE_MODE (type), &x, c);
8431 return build_real (type, x);
8434 /* Optimize pow(x,0) = 1.0. */
8435 if (c == 0)
8436 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8437 arg0);
8439 /* Optimize pow(x,1) = x. */
8440 if (c == 1)
8441 return arg0;
8443 /* Optimize pow(x,-1) = 1.0/x. */
8444 if (c == -1)
8445 return fold_build2_loc (loc, RDIV_EXPR, type,
8446 build_real (type, dconst1), arg0);
8449 return NULL_TREE;
8452 /* A subroutine of fold_builtin to fold the various exponent
8453 functions. Return NULL_TREE if no simplification can be made.
8454 FUNC is the corresponding MPFR exponent function. */
8456 static tree
8457 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8458 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8460 if (validate_arg (arg, REAL_TYPE))
8462 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8463 tree res;
8465 /* Calculate the result when the argument is a constant. */
8466 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8467 return res;
8469 /* Optimize expN(logN(x)) = x. */
8470 if (flag_unsafe_math_optimizations)
8472 const enum built_in_function fcode = builtin_mathfn_code (arg);
8474 if ((func == mpfr_exp
8475 && (fcode == BUILT_IN_LOG
8476 || fcode == BUILT_IN_LOGF
8477 || fcode == BUILT_IN_LOGL))
8478 || (func == mpfr_exp2
8479 && (fcode == BUILT_IN_LOG2
8480 || fcode == BUILT_IN_LOG2F
8481 || fcode == BUILT_IN_LOG2L))
8482 || (func == mpfr_exp10
8483 && (fcode == BUILT_IN_LOG10
8484 || fcode == BUILT_IN_LOG10F
8485 || fcode == BUILT_IN_LOG10L)))
8486 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8490 return NULL_TREE;
8493 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8494 Return NULL_TREE if no simplification can be made. */
8496 static tree
8497 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8499 tree fn, len, lenp1, call, type;
8501 if (!validate_arg (dest, POINTER_TYPE)
8502 || !validate_arg (src, POINTER_TYPE))
8503 return NULL_TREE;
8505 len = c_strlen (src, 1);
8506 if (!len
8507 || TREE_CODE (len) != INTEGER_CST)
8508 return NULL_TREE;
8510 if (optimize_function_for_size_p (cfun)
8511 /* If length is zero it's small enough. */
8512 && !integer_zerop (len))
8513 return NULL_TREE;
8515 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8516 if (!fn)
8517 return NULL_TREE;
8519 lenp1 = size_binop_loc (loc, PLUS_EXPR,
8520 fold_convert_loc (loc, size_type_node, len),
8521 build_int_cst (size_type_node, 1));
8522 /* We use dest twice in building our expression. Save it from
8523 multiple expansions. */
8524 dest = builtin_save_expr (dest);
8525 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8527 type = TREE_TYPE (TREE_TYPE (fndecl));
8528 dest = fold_build_pointer_plus_loc (loc, dest, len);
8529 dest = fold_convert_loc (loc, type, dest);
8530 dest = omit_one_operand_loc (loc, type, dest, call);
8531 return dest;
8534 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8535 arguments to the call, and TYPE is its return type.
8536 Return NULL_TREE if no simplification can be made. */
8538 static tree
8539 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8541 if (!validate_arg (arg1, POINTER_TYPE)
8542 || !validate_arg (arg2, INTEGER_TYPE)
8543 || !validate_arg (len, INTEGER_TYPE))
8544 return NULL_TREE;
8545 else
8547 const char *p1;
8549 if (TREE_CODE (arg2) != INTEGER_CST
8550 || !tree_fits_uhwi_p (len))
8551 return NULL_TREE;
8553 p1 = c_getstr (arg1);
8554 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8556 char c;
8557 const char *r;
8558 tree tem;
8560 if (target_char_cast (arg2, &c))
8561 return NULL_TREE;
8563 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8565 if (r == NULL)
8566 return build_int_cst (TREE_TYPE (arg1), 0);
8568 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8569 return fold_convert_loc (loc, type, tem);
8571 return NULL_TREE;
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (tree_fits_uhwi_p (len) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      /* Normalize the host memcmp result to -1/0/+1.  */
      const int r = memcmp (p1, p2, tree_to_uhwi (len));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      /* Access the bytes through a const-qualified unsigned char
	 pointer so the comparison is unsigned, as memcmp requires.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8642 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8643 Return NULL_TREE if no simplification can be made. */
8645 static tree
8646 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8648 const char *p1, *p2;
8650 if (!validate_arg (arg1, POINTER_TYPE)
8651 || !validate_arg (arg2, POINTER_TYPE))
8652 return NULL_TREE;
8654 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8655 if (operand_equal_p (arg1, arg2, 0))
8656 return integer_zero_node;
8658 p1 = c_getstr (arg1);
8659 p2 = c_getstr (arg2);
8661 if (p1 && p2)
8663 const int i = strcmp (p1, p2);
8664 if (i < 0)
8665 return integer_minus_one_node;
8666 else if (i > 0)
8667 return integer_one_node;
8668 else
8669 return integer_zero_node;
8672 /* If the second arg is "", return *(const unsigned char*)arg1. */
8673 if (p2 && *p2 == '\0')
8675 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8676 tree cst_uchar_ptr_node
8677 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8679 return fold_convert_loc (loc, integer_type_node,
8680 build1 (INDIRECT_REF, cst_uchar_node,
8681 fold_convert_loc (loc,
8682 cst_uchar_ptr_node,
8683 arg1)));
8686 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8687 if (p1 && *p1 == '\0')
8689 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8690 tree cst_uchar_ptr_node
8691 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8693 tree temp
8694 = fold_convert_loc (loc, integer_type_node,
8695 build1 (INDIRECT_REF, cst_uchar_node,
8696 fold_convert_loc (loc,
8697 cst_uchar_ptr_node,
8698 arg2)));
8699 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8702 return NULL_TREE;
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings constant and LEN known: evaluate at compile time,
     normalizing the host strncmp result to -1/0/+1.  */
  if (tree_fits_uhwi_p (len) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_to_uhwi (len));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      /* Read the byte as const unsigned char, per strncmp semantics.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8800 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8801 ARG. Return NULL_TREE if no simplification can be made. */
8803 static tree
8804 fold_builtin_signbit (location_t loc, tree arg, tree type)
8806 if (!validate_arg (arg, REAL_TYPE))
8807 return NULL_TREE;
8809 /* If ARG is a compile-time constant, determine the result. */
8810 if (TREE_CODE (arg) == REAL_CST
8811 && !TREE_OVERFLOW (arg))
8813 REAL_VALUE_TYPE c;
8815 c = TREE_REAL_CST (arg);
8816 return (REAL_VALUE_NEGATIVE (c)
8817 ? build_one_cst (type)
8818 : build_zero_cst (type));
8821 /* If ARG is non-negative, the result is always zero. */
8822 if (tree_expr_nonnegative_p (arg))
8823 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8825 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8826 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8827 return fold_convert (type,
8828 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8829 build_real (TREE_TYPE (arg), dconst0)));
8831 return NULL_TREE;
8834 /* Fold function call to builtin copysign, copysignf or copysignl with
8835 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8836 be made. */
8838 static tree
8839 fold_builtin_copysign (location_t loc, tree fndecl,
8840 tree arg1, tree arg2, tree type)
8842 tree tem;
8844 if (!validate_arg (arg1, REAL_TYPE)
8845 || !validate_arg (arg2, REAL_TYPE))
8846 return NULL_TREE;
8848 /* copysign(X,X) is X. */
8849 if (operand_equal_p (arg1, arg2, 0))
8850 return fold_convert_loc (loc, type, arg1);
8852 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8853 if (TREE_CODE (arg1) == REAL_CST
8854 && TREE_CODE (arg2) == REAL_CST
8855 && !TREE_OVERFLOW (arg1)
8856 && !TREE_OVERFLOW (arg2))
8858 REAL_VALUE_TYPE c1, c2;
8860 c1 = TREE_REAL_CST (arg1);
8861 c2 = TREE_REAL_CST (arg2);
8862 /* c1.sign := c2.sign. */
8863 real_copysign (&c1, &c2);
8864 return build_real (type, c1);
8867 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8868 Remember to evaluate Y for side-effects. */
8869 if (tree_expr_nonnegative_p (arg2))
8870 return omit_one_operand_loc (loc, type,
8871 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8872 arg2);
8874 /* Strip sign changing operations for the first argument. */
8875 tem = fold_strip_sign_ops (arg1);
8876 if (tem)
8877 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8879 return NULL_TREE;
8882 /* Fold a call to builtin isascii with argument ARG. */
8884 static tree
8885 fold_builtin_isascii (location_t loc, tree arg)
8887 if (!validate_arg (arg, INTEGER_TYPE))
8888 return NULL_TREE;
8889 else
8891 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8892 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8893 build_int_cst (integer_type_node,
8894 ~ (unsigned HOST_WIDE_INT) 0x7f));
8895 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8896 arg, integer_zero_node);
8900 /* Fold a call to builtin toascii with argument ARG. */
8902 static tree
8903 fold_builtin_toascii (location_t loc, tree arg)
8905 if (!validate_arg (arg, INTEGER_TYPE))
8906 return NULL_TREE;
8908 /* Transform toascii(c) -> (c & 0x7f). */
8909 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8910 build_int_cst (integer_type_node, 0x7f));
8913 /* Fold a call to builtin isdigit with argument ARG. */
8915 static tree
8916 fold_builtin_isdigit (location_t loc, tree arg)
8918 if (!validate_arg (arg, INTEGER_TYPE))
8919 return NULL_TREE;
8920 else
8922 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8923 /* According to the C standard, isdigit is unaffected by locale.
8924 However, it definitely is affected by the target character set. */
8925 unsigned HOST_WIDE_INT target_digit0
8926 = lang_hooks.to_target_charset ('0');
8928 if (target_digit0 == 0)
8929 return NULL_TREE;
8931 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8932 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8933 build_int_cst (unsigned_type_node, target_digit0));
8934 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8935 build_int_cst (unsigned_type_node, 9));
8939 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8941 static tree
8942 fold_builtin_fabs (location_t loc, tree arg, tree type)
8944 if (!validate_arg (arg, REAL_TYPE))
8945 return NULL_TREE;
8947 arg = fold_convert_loc (loc, type, arg);
8948 if (TREE_CODE (arg) == REAL_CST)
8949 return fold_abs_const (arg, type);
8950 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8953 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8955 static tree
8956 fold_builtin_abs (location_t loc, tree arg, tree type)
8958 if (!validate_arg (arg, INTEGER_TYPE))
8959 return NULL_TREE;
8961 arg = fold_convert_loc (loc, type, arg);
8962 if (TREE_CODE (arg) == INTEGER_CST)
8963 return fold_abs_const (arg, type);
8964 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8967 /* Fold a fma operation with arguments ARG[012]. */
8969 tree
8970 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8971 tree type, tree arg0, tree arg1, tree arg2)
8973 if (TREE_CODE (arg0) == REAL_CST
8974 && TREE_CODE (arg1) == REAL_CST
8975 && TREE_CODE (arg2) == REAL_CST)
8976 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8978 return NULL_TREE;
8981 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8983 static tree
8984 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8986 if (validate_arg (arg0, REAL_TYPE)
8987 && validate_arg (arg1, REAL_TYPE)
8988 && validate_arg (arg2, REAL_TYPE))
8990 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
8991 if (tem)
8992 return tem;
8994 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8995 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8996 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8998 return NULL_TREE;
/* Fold a call to builtin fmin or fmax.  MAX selects fmax semantics
   when true, fmin when false.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
9046 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9048 static tree
9049 fold_builtin_carg (location_t loc, tree arg, tree type)
9051 if (validate_arg (arg, COMPLEX_TYPE)
9052 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9054 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9056 if (atan2_fn)
9058 tree new_arg = builtin_save_expr (arg);
9059 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9060 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9061 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9065 return NULL_TREE;
/* Fold a call to builtin logb/ilogb.  RETTYPE distinguishes the two:
   a REAL_TYPE return means logb, otherwise ilogb.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    {
	      /* For logb(-Inf) we have to return +Inf.  */
	      if (real_isinf (value) && real_isneg (value))
		{
		  REAL_VALUE_TYPE tem;
		  real_inf (&tem);
		  return build_real (rettype, tem);
		}
	      return fold_convert_loc (loc, rettype, arg);
	    }
	  /* For ilogb, Inf/NaN give up like the zero case below.
	     Fall through...  */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9119 /* Fold a call to builtin significand, if radix == 2. */
9121 static tree
9122 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9124 if (! validate_arg (arg, REAL_TYPE))
9125 return NULL_TREE;
9127 STRIP_NOPS (arg);
9129 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9131 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9133 switch (value->cl)
9135 case rvc_zero:
9136 case rvc_nan:
9137 case rvc_inf:
9138 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9139 return fold_convert_loc (loc, rettype, arg);
9140 case rvc_normal:
9141 /* For normal numbers, proceed iff radix == 2. */
9142 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9144 REAL_VALUE_TYPE result = *value;
9145 /* In GCC, normalized significands are in the range [0.5,
9146 1.0). We want them to be [1.0, 2.0) so set the
9147 exponent to 1. */
9148 SET_REAL_EXP (&result, 1);
9149 return build_real (rettype, result);
9151 break;
9155 return NULL_TREE;
/* Fold a call to builtin frexp, we can assume the base is 2.
   ARG0 is the value, ARG1 the int* destination for the exponent,
   RETTYPE the call's return type.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only a constant first argument can be folded.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9214 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9215 then we can assume the base is two. If it's false, then we have to
9216 check the mode of the TYPE parameter in certain cases.
        Returns the folded constant, ARG0 when the adjustment is a no-op,
        or NULL_TREE when the call cannot be folded.  */
9218 static tree
9219 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9220 tree type, bool ldexp)
9222 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9224 STRIP_NOPS (arg0);
9225 STRIP_NOPS (arg1);
9227 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9228 if (real_zerop (arg0) || integer_zerop (arg1)
9229 || (TREE_CODE (arg0) == REAL_CST
9230 && !real_isfinite (&TREE_REAL_CST (arg0))))
9231 return omit_one_operand_loc (loc, type, arg0, arg1);
9233 /* If both arguments are constant, then try to evaluate it.
        For scalbn/scalbln (!ldexp) only do so when the mode's radix
        is 2, since the scaling is then the same as ldexp's.  */
9234 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9235 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9236 && tree_fits_shwi_p (arg1))
9238 /* Bound the maximum adjustment to twice the range of the
9239 mode's valid exponents. Use abs to ensure the range is
9240 positive as a sanity check. */
9241 const long max_exp_adj = 2 *
9242 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9243 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9245 /* Get the user-requested adjustment. */
9246 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9248 /* The requested adjustment must be inside this range. This
9249 is a preliminary cap to avoid things like overflow, we
9250 may still fail to compute the result for other reasons. */
9251 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9253 REAL_VALUE_TYPE initial_result;
9255 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9257 /* Ensure we didn't overflow. */
9258 if (! real_isinf (&initial_result))
9260 const REAL_VALUE_TYPE trunc_result
9261 = real_value_truncate (TYPE_MODE (type), initial_result);
9263 /* Only proceed if the target mode can hold the
9264 resulting value. */
9265 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9266 return build_real (type, trunc_result);
9272 return NULL_TREE;
9275 /* Fold a call to builtin modf.  Folds a constant argument to a
        COMPOUND_EXPR computing (*ARG1 = integral part, fractional part),
        or returns NULL_TREE when the call cannot be folded.  */
9277 static tree
9278 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9280 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9281 return NULL_TREE;
9283 STRIP_NOPS (arg0);
     /* Only a constant, non-overflowing real argument can be folded.  */
9285 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9286 return NULL_TREE;
9288 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9290 /* Proceed if a valid pointer type was passed in.  modf stores the
        integral part through a pointer to the same real type it
        returns, hence the main-variant comparison with RETTYPE.  */
9291 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9293 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9294 REAL_VALUE_TYPE trunc, frac;
9296 switch (value->cl)
9298 case rvc_nan:
9299 case rvc_zero:
9300 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9301 trunc = frac = *value;
9302 break;
9303 case rvc_inf:
9304 /* For +-Inf, return (*arg1 = arg0, +-0). */
9305 frac = dconst0;
9306 frac.sign = value->sign;
9307 trunc = *value;
9308 break;
9309 case rvc_normal:
9310 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9311 real_trunc (&trunc, VOIDmode, value);
9312 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9313 /* If the original number was negative and already
9314 integral, then the fractional part is -0.0. */
9315 if (value->sign && frac.cl == rvc_zero)
9316 frac.sign = value->sign;
9317 break;
9320 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9321 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9322 build_real (rettype, trunc));
9323 TREE_SIDE_EFFECTS (arg1) = 1;
9324 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9325 build_real (rettype, frac));
9328 return NULL_TREE;
9331 /* Given a location LOC, an interclass builtin function decl FNDECL
9332 and its single argument ARG, return a folded expression computing
9333 the same, or NULL_TREE if we either couldn't or didn't want to fold
9334 (the latter happens if there's an RTL instruction available).  */
9336 static tree
9337 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9339 enum machine_mode mode;
9341 if (!validate_arg (arg, REAL_TYPE))
9342 return NULL_TREE;
     /* Prefer an RTL instruction pattern over the generic expansions
        below when the target provides one.  */
9344 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9345 return NULL_TREE;
9347 mode = TYPE_MODE (TREE_TYPE (arg));
9349 /* If there is no optab, try generic code. */
9350 switch (DECL_FUNCTION_CODE (fndecl))
9352 tree result;
9354 CASE_FLT_FN (BUILT_IN_ISINF):
9356 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9357 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9358 tree const type = TREE_TYPE (arg);
9359 REAL_VALUE_TYPE r;
9360 char buf[128];
           /* R is the mode's largest finite value; anything greater in
              magnitude must be an infinity.  */
9362 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9363 real_from_string (&r, buf);
9364 result = build_call_expr (isgr_fn, 2,
9365 fold_build1_loc (loc, ABS_EXPR, type, arg),
9366 build_real (type, r));
9367 return result;
9369 CASE_FLT_FN (BUILT_IN_FINITE):
9370 case BUILT_IN_ISFINITE:
9372 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9373 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9374 tree const type = TREE_TYPE (arg);
9375 REAL_VALUE_TYPE r;
9376 char buf[128];
9378 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9379 real_from_string (&r, buf);
9380 result = build_call_expr (isle_fn, 2,
9381 fold_build1_loc (loc, ABS_EXPR, type, arg),
9382 build_real (type, r));
9383 /*result = fold_build2_loc (loc, UNGT_EXPR,
9384 TREE_TYPE (TREE_TYPE (fndecl)),
9385 fold_build1_loc (loc, ABS_EXPR, type, arg),
9386 build_real (type, r));
9387 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9388 TREE_TYPE (TREE_TYPE (fndecl)),
9389 result);*/
9390 return result;
9392 case BUILT_IN_ISNORMAL:
9394 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9395 islessequal(fabs(x),DBL_MAX). */
9396 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9397 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9398 tree const type = TREE_TYPE (arg);
9399 REAL_VALUE_TYPE rmax, rmin;
9400 char buf[128];
           /* RMAX is the mode's largest finite value; RMIN is
              0x1p(emin-1), the smallest normalized value.  */
9402 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9403 real_from_string (&rmax, buf);
9404 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9405 real_from_string (&rmin, buf);
           /* Save fabs(x) so it is evaluated only once in the combined
              expression below.  */
9406 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9407 result = build_call_expr (isle_fn, 2, arg,
9408 build_real (type, rmax));
9409 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9410 build_call_expr (isge_fn, 2, arg,
9411 build_real (type, rmin)));
9412 return result;
9414 default:
9415 break;
9418 return NULL_TREE;
9421 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9422 ARG is the argument for the call.  BUILTIN_INDEX selects which of
        the classification semantics to apply.  Returns the folded
        expression or NULL_TREE if no simplification was possible.  */
9424 static tree
9425 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9427 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9428 REAL_VALUE_TYPE r;
9430 if (!validate_arg (arg, REAL_TYPE))
9431 return NULL_TREE;
9433 switch (builtin_index)
9435 case BUILT_IN_ISINF:
       /* Without infinities in the mode, isinf is statically false.  */
9436 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9437 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9439 if (TREE_CODE (arg) == REAL_CST)
9441 r = TREE_REAL_CST (arg);
           /* For a constant infinity, fold to +1 or -1 by sign.  */
9442 if (real_isinf (&r))
9443 return real_compare (GT_EXPR, &r, &dconst0)
9444 ? integer_one_node : integer_minus_one_node;
9445 else
9446 return integer_zero_node;
9449 return NULL_TREE;
9451 case BUILT_IN_ISINF_SIGN:
9453 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9454 /* In a boolean context, GCC will fold the inner COND_EXPR to
9455 1. So e.g. "if (isinf_sign(x))" would be folded to just
9456 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9457 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9458 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9459 tree tmp = NULL_TREE;
         /* ARG is used twice (signbit and isinf); evaluate it once.  */
9461 arg = builtin_save_expr (arg);
9463 if (signbit_fn && isinf_fn)
9465 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9466 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
           /* Normalize both calls to 0/1 before combining them.  */
9468 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9469 signbit_call, integer_zero_node);
9470 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9471 isinf_call, integer_zero_node);
9473 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9474 integer_minus_one_node, integer_one_node);
9475 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9476 isinf_call, tmp,
9477 integer_zero_node);
9480 return tmp;
9483 case BUILT_IN_ISFINITE:
       /* With neither NaNs nor infinities, isfinite is statically true.  */
9484 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9485 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9486 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9488 if (TREE_CODE (arg) == REAL_CST)
9490 r = TREE_REAL_CST (arg);
9491 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9494 return NULL_TREE;
9496 case BUILT_IN_ISNAN:
9497 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9498 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9500 if (TREE_CODE (arg) == REAL_CST)
9502 r = TREE_REAL_CST (arg);
9503 return real_isnan (&r) ? integer_one_node : integer_zero_node;
       /* isnan(x) -> x unordered with itself; save ARG since it is
          used on both sides of the comparison.  */
9506 arg = builtin_save_expr (arg);
9507 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9509 default:
9510 gcc_unreachable ();
9514 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9515 This builtin will generate code to return the appropriate floating
9516 point classification depending on the value of the floating point
9517 number passed in. The possible return values must be supplied as
9518 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9519 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9520 one floating point argument which is "type generic".
        Returns the folded COND_EXPR chain, or NULL_TREE if the argument
        list does not match the expected signature.  */
9522 static tree
9523 fold_builtin_fpclassify (location_t loc, tree exp)
9525 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9526 arg, type, res, tmp;
9527 enum machine_mode mode;
9528 REAL_VALUE_TYPE r;
9529 char buf[128];
9531 /* Verify the required arguments in the original call. */
9532 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9533 INTEGER_TYPE, INTEGER_TYPE,
9534 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9535 return NULL_TREE;
9537 fp_nan = CALL_EXPR_ARG (exp, 0);
9538 fp_infinite = CALL_EXPR_ARG (exp, 1);
9539 fp_normal = CALL_EXPR_ARG (exp, 2);
9540 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9541 fp_zero = CALL_EXPR_ARG (exp, 4);
9542 arg = CALL_EXPR_ARG (exp, 5);
9543 type = TREE_TYPE (arg);
9544 mode = TYPE_MODE (type);
     /* From here on ARG is fabs(x), saved so the tests below evaluate
        the user's expression only once.  */
9545 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9547 /* fpclassify(x) ->
9548 isnan(x) ? FP_NAN :
9549 (fabs(x) == Inf ? FP_INFINITE :
9550 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9551 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
     /* Build the chain inside-out, starting with zero vs subnormal.  */
9553 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9554 build_real (type, dconst0));
9555 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9556 tmp, fp_zero, fp_subnormal);
     /* R = 0x1p(emin-1), the smallest normalized value of the mode.  */
9558 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9559 real_from_string (&r, buf);
9560 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9561 arg, build_real (type, r));
9562 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
     /* Only test for Inf / NaN when the mode honors them.  */
9564 if (HONOR_INFINITIES (mode))
9566 real_inf (&r);
9567 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9568 build_real (type, r));
9569 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9570 fp_infinite, res);
9573 if (HONOR_NANS (mode))
9575 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9576 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9579 return res;
9582 /* Fold a call to an unordered comparison function such as
9583 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9584 being called and ARG0 and ARG1 are the arguments for the call.
9585 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9586 the opposite of the desired result. UNORDERED_CODE is used
9587 for modes that can hold NaNs and ORDERED_CODE is used for
9588 the rest. */
9590 static tree
9591 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9592 enum tree_code unordered_code,
9593 enum tree_code ordered_code)
9595 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9596 enum tree_code code;
9597 tree type0, type1;
9598 enum tree_code code0, code1;
9599 tree cmp_type = NULL_TREE;
9601 type0 = TREE_TYPE (arg0);
9602 type1 = TREE_TYPE (arg1);
9604 code0 = TREE_CODE (type0);
9605 code1 = TREE_CODE (type1);
     /* Pick the common type both operands are converted to.
        NOTE(review): CMP_TYPE stays NULL_TREE when neither operand is
        REAL_TYPE — presumably the front end guarantees at least one
        floating-point operand here; confirm against callers.  */
9607 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9608 /* Choose the wider of two real types. */
9609 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9610 ? type0 : type1;
9611 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9612 cmp_type = type0;
9613 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9614 cmp_type = type1;
9616 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9617 arg1 = fold_convert_loc (loc, cmp_type, arg1);
     /* isunordered is the one builtin expressed directly rather than
        as the negation of its opposite.  */
9619 if (unordered_code == UNORDERED_EXPR)
9621 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9622 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1)
;
9623 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
     /* Otherwise build !(arg0 <opposite-code> arg1): the unordered
        variant when NaNs are honored, the plain one when not.  */
9626 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9627 : ordered_code;
9628 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9629 fold_build2_loc (loc, code, type, arg0, arg1));
9632 /* Fold a call to built-in function FNDECL with 0 arguments.
9633 IGNORE is true if the result of the function call is ignored. This
9634 function returns NULL_TREE if no simplification was possible. */
9636 static tree
9637 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9639 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9640 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9641 switch (fcode)
9643 CASE_FLT_FN (BUILT_IN_INF):
9644 case BUILT_IN_INFD32:
9645 case BUILT_IN_INFD64:
9646 case BUILT_IN_INFD128:
       /* __builtin_inf: warn (second arg true) if the mode lacks Inf.  */
9647 return fold_builtin_inf (loc, type, true);
9649 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
       /* HUGE_VAL may legitimately fall back to the largest finite
          value, so no warning (second arg false).  */
9650 return fold_builtin_inf (loc, type, false);
9652 case BUILT_IN_CLASSIFY_TYPE:
9653 return fold_builtin_classify_type (NULL_TREE);
9655 case BUILT_IN_UNREACHABLE:
       /* Under -fsanitize=unreachable, emit a ubsan trap instead of
          plain undefined behavior, unless the function opts out via
          the no_sanitize_undefined attribute.  */
9656 if (flag_sanitize & SANITIZE_UNREACHABLE
9657 && (current_function_decl == NULL
9658 || !lookup_attribute ("no_sanitize_undefined",
9659 DECL_ATTRIBUTES (current_function_decl))))
9660 return ubsan_instrument_unreachable (loc);
9661 break;
9663 default:
9664 break;
9666 return NULL_TREE;
9669 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9670 IGNORE is true if the result of the function call is ignored. This
9671 function returns NULL_TREE if no simplification was possible.
        Dispatch table: each case either folds directly or delegates to
        a fold_builtin_* / do_mpfr_* / do_mpc_* helper.  */
9673 static tree
9674 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9676 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9677 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9678 switch (fcode)
9680 case BUILT_IN_CONSTANT_P:
9682 tree val = fold_builtin_constant_p (arg0);
9684 /* Gimplification will pull the CALL_EXPR for the builtin out of
9685 an if condition. When not optimizing, we'll not CSE it back.
9686 To avoid link error types of regressions, return false now. */
9687 if (!val && !optimize)
9688 val = integer_zero_node;
9690 return val;
9693 case BUILT_IN_CLASSIFY_TYPE:
9694 return fold_builtin_classify_type (arg0);
9696 case BUILT_IN_STRLEN:
9697 return fold_builtin_strlen (loc, type, arg0);
9699 CASE_FLT_FN (BUILT_IN_FABS):
9700 case BUILT_IN_FABSD32:
9701 case BUILT_IN_FABSD64:
9702 case BUILT_IN_FABSD128:
9703 return fold_builtin_fabs (loc, arg0, type);
9705 case BUILT_IN_ABS:
9706 case BUILT_IN_LABS:
9707 case BUILT_IN_LLABS:
9708 case BUILT_IN_IMAXABS:
9709 return fold_builtin_abs (loc, arg0, type);
     /* Complex-valued builtins: fold only when the argument really is
        a complex of a real type.  */
9711 CASE_FLT_FN (BUILT_IN_CONJ):
9712 if (validate_arg (arg0, COMPLEX_TYPE)
9713 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9714 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9715 break;
9717 CASE_FLT_FN (BUILT_IN_CREAL):
9718 if (validate_arg (arg0, COMPLEX_TYPE)
9719 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
       /* NOTE(review): the stray second ';' below is a harmless empty
          statement.  */
9720 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9721 break;
9723 CASE_FLT_FN (BUILT_IN_CIMAG):
9724 if (validate_arg (arg0, COMPLEX_TYPE)
9725 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9726 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9727 break;
9729 CASE_FLT_FN (BUILT_IN_CCOS):
9730 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9732 CASE_FLT_FN (BUILT_IN_CCOSH):
9733 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9735 CASE_FLT_FN (BUILT_IN_CPROJ):
9736 return fold_builtin_cproj (loc, arg0, type);
9738 CASE_FLT_FN (BUILT_IN_CSIN):
9739 if (validate_arg (arg0, COMPLEX_TYPE)
9740 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9741 return do_mpc_arg1 (arg0, type, mpc_sin);
9742 break;
9744 CASE_FLT_FN (BUILT_IN_CSINH):
9745 if (validate_arg (arg0, COMPLEX_TYPE)
9746 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9747 return do_mpc_arg1 (arg0, type, mpc_sinh);
9748 break;
9750 CASE_FLT_FN (BUILT_IN_CTAN):
9751 if (validate_arg (arg0, COMPLEX_TYPE)
9752 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9753 return do_mpc_arg1 (arg0, type, mpc_tan);
9754 break;
9756 CASE_FLT_FN (BUILT_IN_CTANH):
9757 if (validate_arg (arg0, COMPLEX_TYPE)
9758 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9759 return do_mpc_arg1 (arg0, type, mpc_tanh);
9760 break;
9762 CASE_FLT_FN (BUILT_IN_CLOG):
9763 if (validate_arg (arg0, COMPLEX_TYPE)
9764 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9765 return do_mpc_arg1 (arg0, type, mpc_log);
9766 break;
9768 CASE_FLT_FN (BUILT_IN_CSQRT):
9769 if (validate_arg (arg0, COMPLEX_TYPE)
9770 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9771 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9772 break;
9774 CASE_FLT_FN (BUILT_IN_CASIN):
9775 if (validate_arg (arg0, COMPLEX_TYPE)
9776 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9777 return do_mpc_arg1 (arg0, type, mpc_asin);
9778 break;
9780 CASE_FLT_FN (BUILT_IN_CACOS):
9781 if (validate_arg (arg0, COMPLEX_TYPE)
9782 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9783 return do_mpc_arg1 (arg0, type, mpc_acos);
9784 break;
9786 CASE_FLT_FN (BUILT_IN_CATAN):
9787 if (validate_arg (arg0, COMPLEX_TYPE)
9788 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9789 return do_mpc_arg1 (arg0, type, mpc_atan);
9790 break;
9792 CASE_FLT_FN (BUILT_IN_CASINH):
9793 if (validate_arg (arg0, COMPLEX_TYPE)
9794 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9795 return do_mpc_arg1 (arg0, type, mpc_asinh);
9796 break;
9798 CASE_FLT_FN (BUILT_IN_CACOSH):
9799 if (validate_arg (arg0, COMPLEX_TYPE)
9800 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9801 return do_mpc_arg1 (arg0, type, mpc_acosh);
9802 break;
9804 CASE_FLT_FN (BUILT_IN_CATANH):
9805 if (validate_arg (arg0, COMPLEX_TYPE)
9806 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9807 return do_mpc_arg1 (arg0, type, mpc_atanh);
9808 break;
9810 CASE_FLT_FN (BUILT_IN_CABS):
9811 return fold_builtin_cabs (loc, arg0, type, fndecl);
9813 CASE_FLT_FN (BUILT_IN_CARG):
9814 return fold_builtin_carg (loc, arg0, type);
9816 CASE_FLT_FN (BUILT_IN_SQRT):
9817 return fold_builtin_sqrt (loc, arg0, type);
9819 CASE_FLT_FN (BUILT_IN_CBRT):
9820 return fold_builtin_cbrt (loc, arg0, type);
     /* Real-valued math builtins constant-folded via MPFR.  The last
        three do_mpfr_arg1 arguments give the valid input range (lower
        bound, upper bound, bounds-inclusive flag).  */
9822 CASE_FLT_FN (BUILT_IN_ASIN):
9823 if (validate_arg (arg0, REAL_TYPE))
9824 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9825 &dconstm1, &dconst1, true);
9826 break;
9828 CASE_FLT_FN (BUILT_IN_ACOS):
9829 if (validate_arg (arg0, REAL_TYPE))
9830 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9831 &dconstm1, &dconst1, true);
9832 break;
9834 CASE_FLT_FN (BUILT_IN_ATAN):
9835 if (validate_arg (arg0, REAL_TYPE))
9836 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9837 break;
9839 CASE_FLT_FN (BUILT_IN_ASINH):
9840 if (validate_arg (arg0, REAL_TYPE))
9841 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9842 break;
9844 CASE_FLT_FN (BUILT_IN_ACOSH):
9845 if (validate_arg (arg0, REAL_TYPE))
9846 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9847 &dconst1, NULL, true);
9848 break;
9850 CASE_FLT_FN (BUILT_IN_ATANH):
9851 if (validate_arg (arg0, REAL_TYPE))
9852 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9853 &dconstm1, &dconst1, false);
9854 break;
9856 CASE_FLT_FN (BUILT_IN_SIN):
9857 if (validate_arg (arg0, REAL_TYPE))
9858 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9859 break;
9861 CASE_FLT_FN (BUILT_IN_COS):
9862 return fold_builtin_cos (loc, arg0, type, fndecl);
9864 CASE_FLT_FN (BUILT_IN_TAN):
9865 return fold_builtin_tan (arg0, type);
9867 CASE_FLT_FN (BUILT_IN_CEXP):
9868 return fold_builtin_cexp (loc, arg0, type);
9870 CASE_FLT_FN (BUILT_IN_CEXPI):
9871 if (validate_arg (arg0, REAL_TYPE))
9872 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9873 break;
9875 CASE_FLT_FN (BUILT_IN_SINH):
9876 if (validate_arg (arg0, REAL_TYPE))
9877 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9878 break;
9880 CASE_FLT_FN (BUILT_IN_COSH):
9881 return fold_builtin_cosh (loc, arg0, type, fndecl);
9883 CASE_FLT_FN (BUILT_IN_TANH):
9884 if (validate_arg (arg0, REAL_TYPE))
9885 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9886 break;
9888 CASE_FLT_FN (BUILT_IN_ERF):
9889 if (validate_arg (arg0, REAL_TYPE))
9890 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9891 break;
9893 CASE_FLT_FN (BUILT_IN_ERFC):
9894 if (validate_arg (arg0, REAL_TYPE))
9895 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9896 break;
9898 CASE_FLT_FN (BUILT_IN_TGAMMA):
9899 if (validate_arg (arg0, REAL_TYPE))
9900 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9901 break;
9903 CASE_FLT_FN (BUILT_IN_EXP):
9904 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9906 CASE_FLT_FN (BUILT_IN_EXP2):
9907 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9909 CASE_FLT_FN (BUILT_IN_EXP10):
9910 CASE_FLT_FN (BUILT_IN_POW10):
9911 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9913 CASE_FLT_FN (BUILT_IN_EXPM1):
9914 if (validate_arg (arg0, REAL_TYPE))
9915 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9916 break;
9918 CASE_FLT_FN (BUILT_IN_LOG):
9919 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9921 CASE_FLT_FN (BUILT_IN_LOG2):
9922 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9924 CASE_FLT_FN (BUILT_IN_LOG10):
9925 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9927 CASE_FLT_FN (BUILT_IN_LOG1P):
9928 if (validate_arg (arg0, REAL_TYPE))
9929 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9930 &dconstm1, NULL, false);
9931 break;
9933 CASE_FLT_FN (BUILT_IN_J0):
9934 if (validate_arg (arg0, REAL_TYPE))
9935 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9936 NULL, NULL, 0);
9937 break;
9939 CASE_FLT_FN (BUILT_IN_J1):
9940 if (validate_arg (arg0, REAL_TYPE))
9941 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9942 NULL, NULL, 0);
9943 break;
9945 CASE_FLT_FN (BUILT_IN_Y0):
9946 if (validate_arg (arg0, REAL_TYPE))
9947 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9948 &dconst0, NULL, false);
9949 break;
9951 CASE_FLT_FN (BUILT_IN_Y1):
9952 if (validate_arg (arg0, REAL_TYPE))
9953 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9954 &dconst0, NULL, false);
9955 break;
9957 CASE_FLT_FN (BUILT_IN_NAN):
9958 case BUILT_IN_NAND32:
9959 case BUILT_IN_NAND64:
9960 case BUILT_IN_NAND128:
       /* Quiet NaN (last arg true) vs signaling NaN below.  */
9961 return fold_builtin_nan (arg0, type, true);
9963 CASE_FLT_FN (BUILT_IN_NANS):
9964 return fold_builtin_nan (arg0, type, false);
9966 CASE_FLT_FN (BUILT_IN_FLOOR):
9967 return fold_builtin_floor (loc, fndecl, arg0);
9969 CASE_FLT_FN (BUILT_IN_CEIL):
9970 return fold_builtin_ceil (loc, fndecl, arg0);
9972 CASE_FLT_FN (BUILT_IN_TRUNC):
9973 return fold_builtin_trunc (loc, fndecl, arg0);
9975 CASE_FLT_FN (BUILT_IN_ROUND):
9976 return fold_builtin_round (loc, fndecl, arg0);
9978 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9979 CASE_FLT_FN (BUILT_IN_RINT):
9980 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9982 CASE_FLT_FN (BUILT_IN_ICEIL):
9983 CASE_FLT_FN (BUILT_IN_LCEIL):
9984 CASE_FLT_FN (BUILT_IN_LLCEIL):
9985 CASE_FLT_FN (BUILT_IN_LFLOOR):
9986 CASE_FLT_FN (BUILT_IN_IFLOOR):
9987 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9988 CASE_FLT_FN (BUILT_IN_IROUND):
9989 CASE_FLT_FN (BUILT_IN_LROUND):
9990 CASE_FLT_FN (BUILT_IN_LLROUND):
9991 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9993 CASE_FLT_FN (BUILT_IN_IRINT):
9994 CASE_FLT_FN (BUILT_IN_LRINT):
9995 CASE_FLT_FN (BUILT_IN_LLRINT):
9996 return fold_fixed_mathfn (loc, fndecl, arg0);
9998 case BUILT_IN_BSWAP16:
9999 case BUILT_IN_BSWAP32:
10000 case BUILT_IN_BSWAP64:
10001 return fold_builtin_bswap (fndecl, arg0);
10003 CASE_INT_FN (BUILT_IN_FFS):
10004 CASE_INT_FN (BUILT_IN_CLZ):
10005 CASE_INT_FN (BUILT_IN_CTZ):
10006 CASE_INT_FN (BUILT_IN_CLRSB):
10007 CASE_INT_FN (BUILT_IN_POPCOUNT):
10008 CASE_INT_FN (BUILT_IN_PARITY):
10009 return fold_builtin_bitop (fndecl, arg0);
10011 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10012 return fold_builtin_signbit (loc, arg0, type);
10014 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10015 return fold_builtin_significand (loc, arg0, type);
10017 CASE_FLT_FN (BUILT_IN_ILOGB):
10018 CASE_FLT_FN (BUILT_IN_LOGB):
10019 return fold_builtin_logb (loc, arg0, type);
10021 case BUILT_IN_ISASCII:
10022 return fold_builtin_isascii (loc, arg0);
10024 case BUILT_IN_TOASCII:
10025 return fold_builtin_toascii (loc, arg0);
10027 case BUILT_IN_ISDIGIT:
10028 return fold_builtin_isdigit (loc, arg0);
     /* Classification builtins: try constant/mode-based folding first,
        then the generic interclass expansion.  */
10030 CASE_FLT_FN (BUILT_IN_FINITE):
10031 case BUILT_IN_FINITED32:
10032 case BUILT_IN_FINITED64:
10033 case BUILT_IN_FINITED128:
10034 case BUILT_IN_ISFINITE:
10036 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10037 if (ret)
10038 return ret;
10039 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10042 CASE_FLT_FN (BUILT_IN_ISINF):
10043 case BUILT_IN_ISINFD32:
10044 case BUILT_IN_ISINFD64:
10045 case BUILT_IN_ISINFD128:
10047 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10048 if (ret)
10049 return ret;
10050 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10053 case BUILT_IN_ISNORMAL:
10054 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10056 case BUILT_IN_ISINF_SIGN:
10057 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10059 CASE_FLT_FN (BUILT_IN_ISNAN):
10060 case BUILT_IN_ISNAND32:
10061 case BUILT_IN_ISNAND64:
10062 case BUILT_IN_ISNAND128:
10063 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10065 case BUILT_IN_PRINTF:
10066 case BUILT_IN_PRINTF_UNLOCKED:
10067 case BUILT_IN_VPRINTF:
10068 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10070 case BUILT_IN_FREE:
       /* free(NULL) is a no-op, so fold it to an empty statement.  */
10071 if (integer_zerop (arg0))
10072 return build_empty_stmt (loc);
10073 break;
10075 default:
10076 break;
10079 return NULL_TREE;
10083 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10084 IGNORE is true if the result of the function call is ignored. This
10085 function returns NULL_TREE if no simplification was possible.
         Dispatch table: each case either folds directly or delegates to
         a fold_builtin_* / do_mpfr_* / do_mpc_* helper.  */
10087 static tree
10088 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10090 tree type = TREE_TYPE (TREE_TYPE (fndecl))
;
10091 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10093 switch (fcode)
10095 CASE_FLT_FN (BUILT_IN_JN):
10096 if (validate_arg (arg0, INTEGER_TYPE)
10097 && validate_arg (arg1, REAL_TYPE))
10098 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10099 break;
10101 CASE_FLT_FN (BUILT_IN_YN):
        /* yn is only defined for a strictly positive real argument,
           hence the &dconst0 lower bound (exclusive).  */
10102 if (validate_arg (arg0, INTEGER_TYPE)
10103 && validate_arg (arg1, REAL_TYPE))
10104 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10105 &dconst0, false);
10106 break;
10108 CASE_FLT_FN (BUILT_IN_DREM):
10109 CASE_FLT_FN (BUILT_IN_REMAINDER):
10110 if (validate_arg (arg0, REAL_TYPE)
10111 && validate_arg (arg1, REAL_TYPE))
10112 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10113 break;
10115 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10116 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10117 if (validate_arg (arg0, REAL_TYPE)
10118 && validate_arg (arg1, POINTER_TYPE))
10119 return do_mpfr_lgamma_r (arg0, arg1, type);
10120 break;
10122 CASE_FLT_FN (BUILT_IN_ATAN2):
10123 if (validate_arg (arg0, REAL_TYPE)
10124 && validate_arg (arg1, REAL_TYPE))
10125 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10126 break;
10128 CASE_FLT_FN (BUILT_IN_FDIM):
10129 if (validate_arg (arg0, REAL_TYPE)
10130 && validate_arg (arg1, REAL_TYPE))
10131 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10132 break;
10134 CASE_FLT_FN (BUILT_IN_HYPOT):
10135 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10137 CASE_FLT_FN (BUILT_IN_CPOW):
10138 if (validate_arg (arg0, COMPLEX_TYPE)
10139 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10140 && validate_arg (arg1, COMPLEX_TYPE)
10141 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10142 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10143 break;
10145 CASE_FLT_FN (BUILT_IN_LDEXP):
10146 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10147 CASE_FLT_FN (BUILT_IN_SCALBN):
10148 CASE_FLT_FN (BUILT_IN_SCALBLN):
10149 return fold_builtin_load_exponent (loc, arg0, arg1,
10150 type, /*ldexp=*/false);
10152 CASE_FLT_FN (BUILT_IN_FREXP):
10153 return fold_builtin_frexp (loc, arg0, arg1, type);
10155 CASE_FLT_FN (BUILT_IN_MODF):
10156 return fold_builtin_modf (loc, arg0, arg1, type);
10158 case BUILT_IN_STRSTR:
10159 return fold_builtin_strstr (loc, arg0, arg1, type);
10161 case BUILT_IN_STRSPN:
10162 return fold_builtin_strspn (loc, arg0, arg1);
10164 case BUILT_IN_STRCSPN:
10165 return fold_builtin_strcspn (loc, arg0, arg1);
10167 case BUILT_IN_STRCHR:
10168 case BUILT_IN_INDEX:
10169 return fold_builtin_strchr (loc, arg0, arg1, type);
10171 case BUILT_IN_STRRCHR:
10172 case BUILT_IN_RINDEX:
10173 return fold_builtin_strrchr (loc, arg0, arg1, type);
10175 case BUILT_IN_STPCPY:
        /* When the stpcpy result is unused, the cheaper strcpy is
           equivalent — transform only if strcpy is available.  */
10176 if (ignore)
10178 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10179 if (!fn)
10180 break;
10182 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10184 else
10185 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10186 break;
10188 case BUILT_IN_STRCMP:
10189 return fold_builtin_strcmp (loc, arg0, arg1);
10191 case BUILT_IN_STRPBRK:
10192 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10194 case BUILT_IN_EXPECT:
10195 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10197 CASE_FLT_FN (BUILT_IN_POW):
10198 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10200 CASE_FLT_FN (BUILT_IN_POWI):
10201 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10203 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10204 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10206 CASE_FLT_FN (BUILT_IN_FMIN):
10207 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10209 CASE_FLT_FN (BUILT_IN_FMAX):
10210 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
      /* Unordered comparisons: the codes passed are the OPPOSITE of
         the builtin's meaning (the helper negates them).  */
10212 case BUILT_IN_ISGREATER:
10213 return fold_builtin_unordered_cmp (loc, fndecl,
10214 arg0, arg1, UNLE_EXPR, LE_EXPR);
10215 case BUILT_IN_ISGREATEREQUAL:
10216 return fold_builtin_unordered_cmp (loc, fndecl,
10217 arg0, arg1, UNLT_EXPR, LT_EXPR);
10218 case BUILT_IN_ISLESS:
10219 return fold_builtin_unordered_cmp (loc, fndecl,
10220 arg0, arg1, UNGE_EXPR, GE_EXPR);
10221 case BUILT_IN_ISLESSEQUAL:
10222 return fold_builtin_unordered_cmp (loc, fndecl,
10223 arg0, arg1, UNGT_EXPR, GT_EXPR);
10224 case BUILT_IN_ISLESSGREATER:
10225 return fold_builtin_unordered_cmp (loc, fndecl,
10226 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10227 case BUILT_IN_ISUNORDERED:
10228 return fold_builtin_unordered_cmp (loc, fndecl,
10229 arg0, arg1, UNORDERED_EXPR,
10230 NOP_EXPR);
10232 /* We do the folding for va_start in the expander. */
10233 case BUILT_IN_VA_START:
10234 break;
10236 case BUILT_IN_OBJECT_SIZE:
10237 return fold_builtin_object_size (arg0, arg1);
10239 case BUILT_IN_PRINTF:
10240 case BUILT_IN_PRINTF_UNLOCKED:
10241 case BUILT_IN_VPRINTF:
10242 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10244 case BUILT_IN_PRINTF_CHK:
10245 case BUILT_IN_VPRINTF_CHK:
        /* For the checked variants ARG0 is the flag argument; only
           drop it when it is a side-effect-free integer.  */
10246 if (!validate_arg (arg0, INTEGER_TYPE)
10247 || TREE_SIDE_EFFECTS (arg0))
10248 return NULL_TREE;
10249 else
10250 return fold_builtin_printf (loc, fndecl,
10251 arg1, NULL_TREE, ignore, fcode);
10252 break;
10254 case BUILT_IN_FPRINTF:
10255 case BUILT_IN_FPRINTF_UNLOCKED:
10256 case BUILT_IN_VFPRINTF:
10257 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10258 ignore, fcode);
10260 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10261 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10263 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10264 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10266 default:
10267 break;
10269 return NULL_TREE;
10272 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10273 and ARG2. IGNORE is true if the result of the function call is ignored.
10274 This function returns NULL_TREE if no simplification was possible. */
10276 static tree
10277 fold_builtin_3 (location_t loc, tree fndecl,
10278 tree arg0, tree arg1, tree arg2, bool ignore)
10280 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10281 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10282 switch (fcode)
10285 CASE_FLT_FN (BUILT_IN_SINCOS):
10286 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10288 CASE_FLT_FN (BUILT_IN_FMA):
10289 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10290 break;
10292 CASE_FLT_FN (BUILT_IN_REMQUO):
10293 if (validate_arg (arg0, REAL_TYPE)
10294 && validate_arg (arg1, REAL_TYPE)
10295 && validate_arg (arg2, POINTER_TYPE))
10296 return do_mpfr_remquo (arg0, arg1, arg2);
10297 break;
10299 case BUILT_IN_STRNCAT:
10300 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10302 case BUILT_IN_STRNCMP:
10303 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10305 case BUILT_IN_MEMCHR:
10306 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10308 case BUILT_IN_BCMP:
10309 case BUILT_IN_MEMCMP:
10310 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10312 case BUILT_IN_SNPRINTF:
10313 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10315 case BUILT_IN_STRCAT_CHK:
10316 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10318 case BUILT_IN_PRINTF_CHK:
10319 case BUILT_IN_VPRINTF_CHK:
10320 if (!validate_arg (arg0, INTEGER_TYPE)
10321 || TREE_SIDE_EFFECTS (arg0))
10322 return NULL_TREE;
10323 else
10324 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10325 break;
10327 case BUILT_IN_FPRINTF:
10328 case BUILT_IN_FPRINTF_UNLOCKED:
10329 case BUILT_IN_VFPRINTF:
10330 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10331 ignore, fcode);
10333 case BUILT_IN_FPRINTF_CHK:
10334 case BUILT_IN_VFPRINTF_CHK:
10335 if (!validate_arg (arg1, INTEGER_TYPE)
10336 || TREE_SIDE_EFFECTS (arg1))
10337 return NULL_TREE;
10338 else
10339 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10340 ignore, fcode);
10342 case BUILT_IN_EXPECT:
10343 return fold_builtin_expect (loc, arg0, arg1, arg2);
10345 default:
10346 break;
10348 return NULL_TREE;
10351 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10352 ARG2, and ARG3. IGNORE is true if the result of the function call is
10353 ignored. This function returns NULL_TREE if no simplification was
10354 possible. */
10356 static tree
10357 fold_builtin_4 (location_t loc, tree fndecl,
10358 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10360 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10362 switch (fcode)
10364 case BUILT_IN_STRNCAT_CHK:
10365 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10367 case BUILT_IN_SNPRINTF:
10368 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10370 case BUILT_IN_FPRINTF_CHK:
10371 case BUILT_IN_VFPRINTF_CHK:
10372 if (!validate_arg (arg1, INTEGER_TYPE)
10373 || TREE_SIDE_EFFECTS (arg1))
10374 return NULL_TREE;
10375 else
10376 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10377 ignore, fcode);
10378 break;
10380 default:
10381 break;
10383 return NULL_TREE;
10386 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10387 arguments, where NARGS <= 4. IGNORE is true if the result of the
10388 function call is ignored. This function returns NULL_TREE if no
10389 simplification was possible. Note that this only folds builtins with
10390 fixed argument patterns. Foldings that do varargs-to-varargs
10391 transformations, or that match calls with more than 4 arguments,
10392 need to be handled with fold_builtin_varargs instead. */
10394 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10396 static tree
10397 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10399 tree ret = NULL_TREE;
10401 switch (nargs)
10403 case 0:
10404 ret = fold_builtin_0 (loc, fndecl, ignore);
10405 break;
10406 case 1:
10407 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10408 break;
10409 case 2:
10410 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10411 break;
10412 case 3:
10413 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10414 break;
10415 case 4:
10416 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10417 ignore);
10418 break;
10419 default:
10420 break;
10422 if (ret)
10424 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10425 SET_EXPR_LOCATION (ret, loc);
10426 TREE_NO_WARNING (ret) = 1;
10427 return ret;
10429 return NULL_TREE;
10432 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10433 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10434 of arguments in ARGS to be omitted. OLDNARGS is the number of
10435 elements in ARGS. */
10437 static tree
10438 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10439 int skip, tree fndecl, int n, va_list newargs)
10441 int nargs = oldnargs - skip + n;
10442 tree *buffer;
10444 if (n > 0)
10446 int i, j;
10448 buffer = XALLOCAVEC (tree, nargs);
10449 for (i = 0; i < n; i++)
10450 buffer[i] = va_arg (newargs, tree);
10451 for (j = skip; j < oldnargs; j++, i++)
10452 buffer[i] = args[j];
10454 else
10455 buffer = args + skip;
10457 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10460 /* Return true if FNDECL shouldn't be folded right now.
10461 If a built-in function has an inline attribute always_inline
10462 wrapper, defer folding it after always_inline functions have
10463 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10464 might not be performed. */
10466 bool
10467 avoid_folding_inline_builtin (tree fndecl)
10469 return (DECL_DECLARED_INLINE_P (fndecl)
10470 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10471 && cfun
10472 && !cfun->always_inline_functions_inlined
10473 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10476 /* A wrapper function for builtin folding that prevents warnings for
10477 "statement without effect" and the like, caused by removing the
10478 call node earlier than the warning is generated. */
10480 tree
10481 fold_call_expr (location_t loc, tree exp, bool ignore)
10483 tree ret = NULL_TREE;
10484 tree fndecl = get_callee_fndecl (exp);
10485 if (fndecl
10486 && TREE_CODE (fndecl) == FUNCTION_DECL
10487 && DECL_BUILT_IN (fndecl)
10488 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10489 yet. Defer folding until we see all the arguments
10490 (after inlining). */
10491 && !CALL_EXPR_VA_ARG_PACK (exp))
10493 int nargs = call_expr_nargs (exp);
10495 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10496 instead last argument is __builtin_va_arg_pack (). Defer folding
10497 even in that case, until arguments are finalized. */
10498 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10500 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10501 if (fndecl2
10502 && TREE_CODE (fndecl2) == FUNCTION_DECL
10503 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10504 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10505 return NULL_TREE;
10508 if (avoid_folding_inline_builtin (fndecl))
10509 return NULL_TREE;
10511 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10512 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10513 CALL_EXPR_ARGP (exp), ignore);
10514 else
10516 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10518 tree *args = CALL_EXPR_ARGP (exp);
10519 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10521 if (!ret)
10522 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10523 if (ret)
10524 return ret;
10527 return NULL_TREE;
10530 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10531 N arguments are passed in the array ARGARRAY. */
10533 tree
10534 fold_builtin_call_array (location_t loc, tree type,
10535 tree fn,
10536 int n,
10537 tree *argarray)
10539 tree ret = NULL_TREE;
10540 tree exp;
10542 if (TREE_CODE (fn) == ADDR_EXPR)
10544 tree fndecl = TREE_OPERAND (fn, 0);
10545 if (TREE_CODE (fndecl) == FUNCTION_DECL
10546 && DECL_BUILT_IN (fndecl))
10548 /* If last argument is __builtin_va_arg_pack (), arguments to this
10549 function are not finalized yet. Defer folding until they are. */
10550 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10552 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10553 if (fndecl2
10554 && TREE_CODE (fndecl2) == FUNCTION_DECL
10555 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10556 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10557 return build_call_array_loc (loc, type, fn, n, argarray);
10559 if (avoid_folding_inline_builtin (fndecl))
10560 return build_call_array_loc (loc, type, fn, n, argarray);
10561 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10563 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10564 if (ret)
10565 return ret;
10567 return build_call_array_loc (loc, type, fn, n, argarray);
10569 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10571 /* First try the transformations that don't require consing up
10572 an exp. */
10573 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10574 if (ret)
10575 return ret;
10578 /* If we got this far, we need to build an exp. */
10579 exp = build_call_array_loc (loc, type, fn, n, argarray);
10580 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10581 return ret ? ret : exp;
10585 return build_call_array_loc (loc, type, fn, n, argarray);
10588 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10589 along with N new arguments specified as the "..." parameters. SKIP
10590 is the number of arguments in EXP to be omitted. This function is used
10591 to do varargs-to-varargs transformations. */
10593 static tree
10594 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10596 va_list ap;
10597 tree t;
10599 va_start (ap, n);
10600 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10601 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10602 va_end (ap);
10604 return t;
10607 /* Validate a single argument ARG against a tree code CODE representing
10608 a type. */
10610 static bool
10611 validate_arg (const_tree arg, enum tree_code code)
10613 if (!arg)
10614 return false;
10615 else if (code == POINTER_TYPE)
10616 return POINTER_TYPE_P (TREE_TYPE (arg));
10617 else if (code == INTEGER_TYPE)
10618 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10619 return code == TREE_CODE (TREE_TYPE (arg));
10622 /* This function validates the types of a function call argument list
10623 against a specified list of tree_codes. If the last specifier is a 0,
10624 that represents an ellipses, otherwise the last specifier must be a
10625 VOID_TYPE.
10627 This is the GIMPLE version of validate_arglist. Eventually we want to
10628 completely convert builtins.c to work from GIMPLEs and the tree based
10629 validate_arglist will then be removed. */
10631 bool
10632 validate_gimple_arglist (const_gimple call, ...)
10634 enum tree_code code;
10635 bool res = 0;
10636 va_list ap;
10637 const_tree arg;
10638 size_t i;
10640 va_start (ap, call);
10641 i = 0;
10645 code = (enum tree_code) va_arg (ap, int);
10646 switch (code)
10648 case 0:
10649 /* This signifies an ellipses, any further arguments are all ok. */
10650 res = true;
10651 goto end;
10652 case VOID_TYPE:
10653 /* This signifies an endlink, if no arguments remain, return
10654 true, otherwise return false. */
10655 res = (i == gimple_call_num_args (call));
10656 goto end;
10657 default:
10658 /* If no parameters remain or the parameter's code does not
10659 match the specified code, return false. Otherwise continue
10660 checking any remaining arguments. */
10661 arg = gimple_call_arg (call, i++);
10662 if (!validate_arg (arg, code))
10663 goto end;
10664 break;
10667 while (1);
10669 /* We need gotos here since we can only have one VA_CLOSE in a
10670 function. */
10671 end: ;
10672 va_end (ap);
10674 return res;
10677 /* Default target-specific builtin expander that does nothing. */
10680 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10681 rtx target ATTRIBUTE_UNUSED,
10682 rtx subtarget ATTRIBUTE_UNUSED,
10683 enum machine_mode mode ATTRIBUTE_UNUSED,
10684 int ignore ATTRIBUTE_UNUSED)
10686 return NULL_RTX;
10689 /* Returns true is EXP represents data that would potentially reside
10690 in a readonly section. */
10692 bool
10693 readonly_data_expr (tree exp)
10695 STRIP_NOPS (exp);
10697 if (TREE_CODE (exp) != ADDR_EXPR)
10698 return false;
10700 exp = get_base_address (TREE_OPERAND (exp, 0));
10701 if (!exp)
10702 return false;
10704 /* Make sure we call decl_readonly_section only for trees it
10705 can handle (since it returns true for everything it doesn't
10706 understand). */
10707 if (TREE_CODE (exp) == STRING_CST
10708 || TREE_CODE (exp) == CONSTRUCTOR
10709 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10710 return decl_readonly_section (exp, 0);
10711 else
10712 return false;
10715 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10716 to the call, and TYPE is its return type.
10718 Return NULL_TREE if no simplification was possible, otherwise return the
10719 simplified form of the call as a tree.
10721 The simplified form may be a constant or other expression which
10722 computes the same value, but in a more efficient manner (including
10723 calls to other builtin functions).
10725 The call may contain arguments which need to be evaluated, but
10726 which are not useful to determine the result of the call. In
10727 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10728 COMPOUND_EXPR will be an argument which must be evaluated.
10729 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10730 COMPOUND_EXPR in the chain will contain the tree for the simplified
10731 form of the builtin function call. */
10733 static tree
10734 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10736 if (!validate_arg (s1, POINTER_TYPE)
10737 || !validate_arg (s2, POINTER_TYPE))
10738 return NULL_TREE;
10739 else
10741 tree fn;
10742 const char *p1, *p2;
10744 p2 = c_getstr (s2);
10745 if (p2 == NULL)
10746 return NULL_TREE;
10748 p1 = c_getstr (s1);
10749 if (p1 != NULL)
10751 const char *r = strstr (p1, p2);
10752 tree tem;
10754 if (r == NULL)
10755 return build_int_cst (TREE_TYPE (s1), 0);
10757 /* Return an offset into the constant string argument. */
10758 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10759 return fold_convert_loc (loc, type, tem);
10762 /* The argument is const char *, and the result is char *, so we need
10763 a type conversion here to avoid a warning. */
10764 if (p2[0] == '\0')
10765 return fold_convert_loc (loc, type, s1);
10767 if (p2[1] != '\0')
10768 return NULL_TREE;
10770 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10771 if (!fn)
10772 return NULL_TREE;
10774 /* New argument list transforming strstr(s1, s2) to
10775 strchr(s1, s2[0]). */
10776 return build_call_expr_loc (loc, fn, 2, s1,
10777 build_int_cst (integer_type_node, p2[0]));
10781 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10782 the call, and TYPE is its return type.
10784 Return NULL_TREE if no simplification was possible, otherwise return the
10785 simplified form of the call as a tree.
10787 The simplified form may be a constant or other expression which
10788 computes the same value, but in a more efficient manner (including
10789 calls to other builtin functions).
10791 The call may contain arguments which need to be evaluated, but
10792 which are not useful to determine the result of the call. In
10793 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10794 COMPOUND_EXPR will be an argument which must be evaluated.
10795 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10796 COMPOUND_EXPR in the chain will contain the tree for the simplified
10797 form of the builtin function call. */
10799 static tree
10800 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10802 if (!validate_arg (s1, POINTER_TYPE)
10803 || !validate_arg (s2, INTEGER_TYPE))
10804 return NULL_TREE;
10805 else
10807 const char *p1;
10809 if (TREE_CODE (s2) != INTEGER_CST)
10810 return NULL_TREE;
10812 p1 = c_getstr (s1);
10813 if (p1 != NULL)
10815 char c;
10816 const char *r;
10817 tree tem;
10819 if (target_char_cast (s2, &c))
10820 return NULL_TREE;
10822 r = strchr (p1, c);
10824 if (r == NULL)
10825 return build_int_cst (TREE_TYPE (s1), 0);
10827 /* Return an offset into the constant string argument. */
10828 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10829 return fold_convert_loc (loc, type, tem);
10831 return NULL_TREE;
10835 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10836 the call, and TYPE is its return type.
10838 Return NULL_TREE if no simplification was possible, otherwise return the
10839 simplified form of the call as a tree.
10841 The simplified form may be a constant or other expression which
10842 computes the same value, but in a more efficient manner (including
10843 calls to other builtin functions).
10845 The call may contain arguments which need to be evaluated, but
10846 which are not useful to determine the result of the call. In
10847 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10848 COMPOUND_EXPR will be an argument which must be evaluated.
10849 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10850 COMPOUND_EXPR in the chain will contain the tree for the simplified
10851 form of the builtin function call. */
10853 static tree
10854 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10856 if (!validate_arg (s1, POINTER_TYPE)
10857 || !validate_arg (s2, INTEGER_TYPE))
10858 return NULL_TREE;
10859 else
10861 tree fn;
10862 const char *p1;
10864 if (TREE_CODE (s2) != INTEGER_CST)
10865 return NULL_TREE;
10867 p1 = c_getstr (s1);
10868 if (p1 != NULL)
10870 char c;
10871 const char *r;
10872 tree tem;
10874 if (target_char_cast (s2, &c))
10875 return NULL_TREE;
10877 r = strrchr (p1, c);
10879 if (r == NULL)
10880 return build_int_cst (TREE_TYPE (s1), 0);
10882 /* Return an offset into the constant string argument. */
10883 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10884 return fold_convert_loc (loc, type, tem);
10887 if (! integer_zerop (s2))
10888 return NULL_TREE;
10890 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10891 if (!fn)
10892 return NULL_TREE;
10894 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10895 return build_call_expr_loc (loc, fn, 2, s1, s2);
10899 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10900 to the call, and TYPE is its return type.
10902 Return NULL_TREE if no simplification was possible, otherwise return the
10903 simplified form of the call as a tree.
10905 The simplified form may be a constant or other expression which
10906 computes the same value, but in a more efficient manner (including
10907 calls to other builtin functions).
10909 The call may contain arguments which need to be evaluated, but
10910 which are not useful to determine the result of the call. In
10911 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10912 COMPOUND_EXPR will be an argument which must be evaluated.
10913 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10914 COMPOUND_EXPR in the chain will contain the tree for the simplified
10915 form of the builtin function call. */
10917 static tree
10918 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10920 if (!validate_arg (s1, POINTER_TYPE)
10921 || !validate_arg (s2, POINTER_TYPE))
10922 return NULL_TREE;
10923 else
10925 tree fn;
10926 const char *p1, *p2;
10928 p2 = c_getstr (s2);
10929 if (p2 == NULL)
10930 return NULL_TREE;
10932 p1 = c_getstr (s1);
10933 if (p1 != NULL)
10935 const char *r = strpbrk (p1, p2);
10936 tree tem;
10938 if (r == NULL)
10939 return build_int_cst (TREE_TYPE (s1), 0);
10941 /* Return an offset into the constant string argument. */
10942 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10943 return fold_convert_loc (loc, type, tem);
10946 if (p2[0] == '\0')
10947 /* strpbrk(x, "") == NULL.
10948 Evaluate and ignore s1 in case it had side-effects. */
10949 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10951 if (p2[1] != '\0')
10952 return NULL_TREE; /* Really call strpbrk. */
10954 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10955 if (!fn)
10956 return NULL_TREE;
10958 /* New argument list transforming strpbrk(s1, s2) to
10959 strchr(s1, s2[0]). */
10960 return build_call_expr_loc (loc, fn, 2, s1,
10961 build_int_cst (integer_type_node, p2[0]));
10965 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10966 arguments to the call.
10968 Return NULL_TREE if no simplification was possible, otherwise return the
10969 simplified form of the call as a tree.
10971 The simplified form may be a constant or other expression which
10972 computes the same value, but in a more efficient manner (including
10973 calls to other builtin functions).
10975 The call may contain arguments which need to be evaluated, but
10976 which are not useful to determine the result of the call. In
10977 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10978 COMPOUND_EXPR will be an argument which must be evaluated.
10979 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10980 COMPOUND_EXPR in the chain will contain the tree for the simplified
10981 form of the builtin function call. */
10983 static tree
10984 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
10986 if (!validate_arg (dst, POINTER_TYPE)
10987 || !validate_arg (src, POINTER_TYPE)
10988 || !validate_arg (len, INTEGER_TYPE))
10989 return NULL_TREE;
10990 else
10992 const char *p = c_getstr (src);
10994 /* If the requested length is zero, or the src parameter string
10995 length is zero, return the dst parameter. */
10996 if (integer_zerop (len) || (p && *p == '\0'))
10997 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
10999 /* If the requested len is greater than or equal to the string
11000 length, call strcat. */
11001 if (TREE_CODE (len) == INTEGER_CST && p
11002 && compare_tree_int (len, strlen (p)) >= 0)
11004 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11006 /* If the replacement _DECL isn't initialized, don't do the
11007 transformation. */
11008 if (!fn)
11009 return NULL_TREE;
11011 return build_call_expr_loc (loc, fn, 2, dst, src);
11013 return NULL_TREE;
11017 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11018 to the call.
11020 Return NULL_TREE if no simplification was possible, otherwise return the
11021 simplified form of the call as a tree.
11023 The simplified form may be a constant or other expression which
11024 computes the same value, but in a more efficient manner (including
11025 calls to other builtin functions).
11027 The call may contain arguments which need to be evaluated, but
11028 which are not useful to determine the result of the call. In
11029 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11030 COMPOUND_EXPR will be an argument which must be evaluated.
11031 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11032 COMPOUND_EXPR in the chain will contain the tree for the simplified
11033 form of the builtin function call. */
11035 static tree
11036 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11038 if (!validate_arg (s1, POINTER_TYPE)
11039 || !validate_arg (s2, POINTER_TYPE))
11040 return NULL_TREE;
11041 else
11043 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11045 /* If both arguments are constants, evaluate at compile-time. */
11046 if (p1 && p2)
11048 const size_t r = strspn (p1, p2);
11049 return build_int_cst (size_type_node, r);
11052 /* If either argument is "", return NULL_TREE. */
11053 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11054 /* Evaluate and ignore both arguments in case either one has
11055 side-effects. */
11056 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11057 s1, s2);
11058 return NULL_TREE;
11062 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11063 to the call.
11065 Return NULL_TREE if no simplification was possible, otherwise return the
11066 simplified form of the call as a tree.
11068 The simplified form may be a constant or other expression which
11069 computes the same value, but in a more efficient manner (including
11070 calls to other builtin functions).
11072 The call may contain arguments which need to be evaluated, but
11073 which are not useful to determine the result of the call. In
11074 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11075 COMPOUND_EXPR will be an argument which must be evaluated.
11076 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11077 COMPOUND_EXPR in the chain will contain the tree for the simplified
11078 form of the builtin function call. */
11080 static tree
11081 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11083 if (!validate_arg (s1, POINTER_TYPE)
11084 || !validate_arg (s2, POINTER_TYPE))
11085 return NULL_TREE;
11086 else
11088 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11090 /* If both arguments are constants, evaluate at compile-time. */
11091 if (p1 && p2)
11093 const size_t r = strcspn (p1, p2);
11094 return build_int_cst (size_type_node, r);
11097 /* If the first argument is "", return NULL_TREE. */
11098 if (p1 && *p1 == '\0')
11100 /* Evaluate and ignore argument s2 in case it has
11101 side-effects. */
11102 return omit_one_operand_loc (loc, size_type_node,
11103 size_zero_node, s2);
11106 /* If the second argument is "", return __builtin_strlen(s1). */
11107 if (p2 && *p2 == '\0')
11109 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11111 /* If the replacement _DECL isn't initialized, don't do the
11112 transformation. */
11113 if (!fn)
11114 return NULL_TREE;
11116 return build_call_expr_loc (loc, fn, 1, s1);
11118 return NULL_TREE;
11122 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11123 produced. False otherwise. This is done so that we don't output the error
11124 or warning twice or three times. */
11126 bool
11127 fold_builtin_next_arg (tree exp, bool va_start_p)
11129 tree fntype = TREE_TYPE (current_function_decl);
11130 int nargs = call_expr_nargs (exp);
11131 tree arg;
11132 /* There is good chance the current input_location points inside the
11133 definition of the va_start macro (perhaps on the token for
11134 builtin) in a system header, so warnings will not be emitted.
11135 Use the location in real source code. */
11136 source_location current_location =
11137 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11138 NULL);
11140 if (!stdarg_p (fntype))
11142 error ("%<va_start%> used in function with fixed args");
11143 return true;
11146 if (va_start_p)
11148 if (va_start_p && (nargs != 2))
11150 error ("wrong number of arguments to function %<va_start%>");
11151 return true;
11153 arg = CALL_EXPR_ARG (exp, 1);
11155 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11156 when we checked the arguments and if needed issued a warning. */
11157 else
11159 if (nargs == 0)
11161 /* Evidently an out of date version of <stdarg.h>; can't validate
11162 va_start's second argument, but can still work as intended. */
11163 warning_at (current_location,
11164 OPT_Wvarargs,
11165 "%<__builtin_next_arg%> called without an argument");
11166 return true;
11168 else if (nargs > 1)
11170 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11171 return true;
11173 arg = CALL_EXPR_ARG (exp, 0);
11176 if (TREE_CODE (arg) == SSA_NAME)
11177 arg = SSA_NAME_VAR (arg);
11179 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11180 or __builtin_next_arg (0) the first time we see it, after checking
11181 the arguments and if needed issuing a warning. */
11182 if (!integer_zerop (arg))
11184 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11186 /* Strip off all nops for the sake of the comparison. This
11187 is not quite the same as STRIP_NOPS. It does more.
11188 We must also strip off INDIRECT_EXPR for C++ reference
11189 parameters. */
11190 while (CONVERT_EXPR_P (arg)
11191 || TREE_CODE (arg) == INDIRECT_REF)
11192 arg = TREE_OPERAND (arg, 0);
11193 if (arg != last_parm)
11195 /* FIXME: Sometimes with the tree optimizers we can get the
11196 not the last argument even though the user used the last
11197 argument. We just warn and set the arg to be the last
11198 argument so that we will get wrong-code because of
11199 it. */
11200 warning_at (current_location,
11201 OPT_Wvarargs,
11202 "second parameter of %<va_start%> not last named argument");
11205 /* Undefined by C99 7.15.1.4p4 (va_start):
11206 "If the parameter parmN is declared with the register storage
11207 class, with a function or array type, or with a type that is
11208 not compatible with the type that results after application of
11209 the default argument promotions, the behavior is undefined."
11211 else if (DECL_REGISTER (arg))
11213 warning_at (current_location,
11214 OPT_Wvarargs,
11215 "undefined behaviour when second parameter of "
11216 "%<va_start%> is declared with %<register%> storage");
11219 /* We want to verify the second parameter just once before the tree
11220 optimizers are run and then avoid keeping it in the tree,
11221 as otherwise we could warn even for correct code like:
11222 void foo (int i, ...)
11223 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11224 if (va_start_p)
11225 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11226 else
11227 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11229 return false;
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  If IGNORED is true, it means that
   the caller does not use the returned value of the function.  */

static tree
fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
		       tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;
  unsigned HOST_WIDE_INT destlen;

  /* Verify the required arguments in the original call.  We deal with two
     types of snprintf() calls: 'snprintf (str, cst, fmt)' and
     'snprintf (dest, cst, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (destsize, INTEGER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* The destination size must be a compile-time constant; otherwise we
     cannot prove the copy fits.  */
  if (!tree_fits_uhwi_p (destsize))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  destlen = tree_to_uhwi (destsize);

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      size_t len = strlen (fmt_str);

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return NULL_TREE;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (len >= destlen)
	return NULL_TREE;

      if (!fn)
	return NULL_TREE;

      /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats and
	 strlen (fmt) < cst.  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);

      /* snprintf's return value is the number of characters that would
	 have been written, i.e. strlen of the format here.  */
      if (!ignored)
	retval = build_int_cst (integer_type_node, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      unsigned HOST_WIDE_INT origlen;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return NULL_TREE;

      retval = c_strlen (orig, 1);
      if (!retval || !tree_fits_uhwi_p (retval))
	return NULL_TREE;

      origlen = tree_to_uhwi (retval);
      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (origlen >= destlen)
	return NULL_TREE;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      if (!fn)
	return NULL_TREE;

      call = build_call_expr_loc (loc, fn, 2, dest, orig);

      if (ignored)
	retval = NULL_TREE;
    }

  if (call && retval)
    {
      /* Convert the computed length to snprintf's declared return type
	 and sequence it after the replacement call.  */
      tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
      retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
11353 /* Expand a call EXP to __builtin_object_size. */
11355 static rtx
11356 expand_builtin_object_size (tree exp)
11358 tree ost;
11359 int object_size_type;
11360 tree fndecl = get_callee_fndecl (exp);
11362 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11364 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11365 exp, fndecl);
11366 expand_builtin_trap ();
11367 return const0_rtx;
11370 ost = CALL_EXPR_ARG (exp, 1);
11371 STRIP_NOPS (ost);
11373 if (TREE_CODE (ost) != INTEGER_CST
11374 || tree_int_cst_sgn (ost) < 0
11375 || compare_tree_int (ost, 3) > 0)
11377 error ("%Klast argument of %D is not integer constant between 0 and 3",
11378 exp, fndecl);
11379 expand_builtin_trap ();
11380 return const0_rtx;
11383 object_size_type = tree_to_shwi (ost);
11385 return object_size_type < 2 ? constm1_rtx : const0_rtx;
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* Second argument is the fill value for memset, a pointer otherwise.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* Without a constant object size we cannot reason at all.  */
  if (! tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      tree fn;

      /* SIZE all-ones means "object size unknown"; otherwise a constant
	 LEN larger than SIZE is a guaranteed overflow — warn and punt.  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Replace the checked call with the plain variant, preserving
	 the tail-call flag of the original.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* mempcpy returns DEST + LEN.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
/* Emit warning if a buffer overflow is detected at compile time.
   EXP is the _chk builtin call, FCODE its BUILT_IN_* code; which
   call operands hold the copy length and the object size depends on
   the builtin, hence the switch below.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* SIZE all-ones means the object size is unknown — nothing to check.  */
  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* LEN here is the source string; warn only when its constant
	 length (plus NUL) is known to exceed SIZE.  */
      len = c_strlen (len, 1);
      if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! tree_fits_uhwi_p (src))
	{
	  /* Source length unknown: the bound alone already reaches SIZE,
	     so overflow is possible but not certain.  */
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  The output length is only
   computable for a literal format with no conversions, or for a
   literal "%s" whose string argument has a known length.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* All-ones SIZE means the object size is unknown.  */
  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  /* The output needs LEN + 1 bytes (trailing NUL), so LEN == SIZE
     already overflows.  */
  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}
11634 /* Emit warning if a free is called with address of a variable. */
11636 static void
11637 maybe_emit_free_warning (tree exp)
11639 tree arg = CALL_EXPR_ARG (exp, 0);
11641 STRIP_NOPS (arg);
11642 if (TREE_CODE (arg) != ADDR_EXPR)
11643 return;
11645 arg = get_base_address (TREE_OPERAND (arg, 0));
11646 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11647 return;
11649 if (SSA_VAR_P (arg))
11650 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11651 "%Kattempt to free a non-heap object %qD", exp, arg);
11652 else
11653 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11654 "%Kattempt to free a non-heap object", exp);
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The object-size type must be an integer constant in [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      /* The "unknown" sentinel differs per type: -1 for 0/1, 0 for 2/3.  */
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
11705 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11706 are the arguments to the call. */
11708 static tree
11709 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
11710 tree src, tree size)
11712 tree fn;
11713 const char *p;
11715 if (!validate_arg (dest, POINTER_TYPE)
11716 || !validate_arg (src, POINTER_TYPE)
11717 || !validate_arg (size, INTEGER_TYPE))
11718 return NULL_TREE;
11720 p = c_getstr (src);
11721 /* If the SRC parameter is "", return DEST. */
11722 if (p && *p == '\0')
11723 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11725 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
11726 return NULL_TREE;
11728 /* If __builtin_strcat_chk is used, assume strcat is available. */
11729 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
11730 if (!fn)
11731 return NULL_TREE;
11733 return build_call_expr_loc (loc, fn, 2, dest, src);
11736 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11737 LEN, and SIZE. */
11739 static tree
11740 fold_builtin_strncat_chk (location_t loc, tree fndecl,
11741 tree dest, tree src, tree len, tree size)
11743 tree fn;
11744 const char *p;
11746 if (!validate_arg (dest, POINTER_TYPE)
11747 || !validate_arg (src, POINTER_TYPE)
11748 || !validate_arg (size, INTEGER_TYPE)
11749 || !validate_arg (size, INTEGER_TYPE))
11750 return NULL_TREE;
11752 p = c_getstr (src);
11753 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
11754 if (p && *p == '\0')
11755 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11756 else if (integer_zerop (len))
11757 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11759 if (! tree_fits_uhwi_p (size))
11760 return NULL_TREE;
11762 if (! integer_all_onesp (size))
11764 tree src_len = c_strlen (src, 1);
11765 if (src_len
11766 && tree_fits_uhwi_p (src_len)
11767 && tree_fits_uhwi_p (len)
11768 && ! tree_int_cst_lt (len, src_len))
11770 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
11771 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
11772 if (!fn)
11773 return NULL_TREE;
11775 return build_call_expr_loc (loc, fn, 3, dest, src, size);
11777 return NULL_TREE;
11780 /* If __builtin_strncat_chk is used, assume strncat is available. */
11781 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
11782 if (!fn)
11783 return NULL_TREE;
11785 return build_call_expr_loc (loc, fn, 3, dest, src, len);
11788 /* Builtins with folding operations that operate on "..." arguments
11789 need special handling; we need to store the arguments in a convenient
11790 data structure before attempting any folding. Fortunately there are
11791 only a few builtins that fall into this category. FNDECL is the
11792 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11793 result of the function call is ignored. */
11795 static tree
11796 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11797 bool ignore ATTRIBUTE_UNUSED)
11799 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11800 tree ret = NULL_TREE;
11802 switch (fcode)
11804 case BUILT_IN_FPCLASSIFY:
11805 ret = fold_builtin_fpclassify (loc, exp);
11806 break;
11808 default:
11809 break;
11811 if (ret)
11813 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11814 SET_EXPR_LOCATION (ret, loc);
11815 TREE_NO_WARNING (ret) = 1;
11816 return ret;
11818 return NULL_TREE;
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
		     tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Two foldable shapes: a "%s" format (string comes from ARG) or a
     format with no conversions at all (string is the format itself).  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		call = build_call_expr_loc (loc, fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr_loc (loc, fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr_loc (loc, fn_putchar, 1, arg);
    }

  if (!call)
    return NULL_TREE;

  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
		      tree fmt, tree arg, bool ignore,
		      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* If FP has side-effects, just wait until gimplification is
	     done.  */
	  if (TREE_SIDE_EFFECTS (fp))
	    return NULL_TREE;

	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_fputc)
	call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
    }

  if (!call)
    return NULL_TREE;
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12069 /* Initialize format string characters in the target charset. */
12071 bool
12072 init_target_chars (void)
12074 static bool init;
12075 if (!init)
12077 target_newline = lang_hooks.to_target_charset ('\n');
12078 target_percent = lang_hooks.to_target_charset ('%');
12079 target_c = lang_hooks.to_target_charset ('c');
12080 target_s = lang_hooks.to_target_charset ('s');
12081 if (target_newline == 0 || target_percent == 0 || target_c == 0
12082 || target_s == 0)
12083 return false;
12085 target_percent_c[0] = target_percent;
12086 target_percent_c[1] = target_c;
12087 target_percent_c[2] = '\0';
12089 target_percent_s[0] = target_percent;
12090 target_percent_s[1] = target_s;
12091 target_percent_s[2] = '\0';
12093 target_percent_s_newline[0] = target_percent;
12094 target_percent_s_newline[1] = target_s;
12095 target_percent_s_newline[2] = target_newline;
12096 target_percent_s_newline[3] = '\0';
12098 init = true;
12100 return true;
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* TYPE is a complex type; its TREE_TYPE is the component
	 real type, used for both halves below.  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
	      bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      /* Only finite arguments within the requested domain are folded.  */
      if (real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
	  && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  /* Evaluate FUNC at the target's precision; clearing flags
	     first lets do_mpfr_ckconv detect overflow/underflow.  */
	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
/* If argument ARG is a REAL_CST, call the two-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      /* Both operands must be finite for the fold to be valid.  */
      if (real_isfinite (ra1) && real_isfinite (ra2))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2;

	  /* Evaluate FUNC at the target's precision; clearing flags
	     first lets do_mpfr_ckconv detect overflow/underflow.  */
	  mpfr_inits2 (prec, m1, m2, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, NULL);
	}
    }

  return result;
}
12279 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12280 FUNC on it and return the resulting value as a tree with type TYPE.
12281 The mpfr precision is set to the precision of TYPE. We assume that
12282 function FUNC returns zero if the result could be calculated
12283 exactly within the requested precision. */
12285 static tree
12286 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12287 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12289 tree result = NULL_TREE;
12291 STRIP_NOPS (arg1);
12292 STRIP_NOPS (arg2);
12293 STRIP_NOPS (arg3);
12295 /* To proceed, MPFR must exactly represent the target floating point
12296 format, which only happens when the target base equals two. */
12297 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12298 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12299 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12300 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12302 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12303 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12304 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12306 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12308 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12309 const int prec = fmt->p;
12310 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12311 int inexact;
12312 mpfr_t m1, m2, m3;
12314 mpfr_inits2 (prec, m1, m2, m3, NULL);
12315 mpfr_from_real (m1, ra1, GMP_RNDN);
12316 mpfr_from_real (m2, ra2, GMP_RNDN);
12317 mpfr_from_real (m3, ra3, GMP_RNDN);
12318 mpfr_clear_flags ();
12319 inexact = func (m1, m1, m2, m3, rnd);
12320 result = do_mpfr_ckconv (m1, type, inexact);
12321 mpfr_clears (m1, m2, m3, NULL);
12325 return result;
12328 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12329 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12330 If ARG_SINP and ARG_COSP are NULL then the result is returned
12331 as a complex value.
12332 The type is taken from the type of ARG and is used for setting the
12333 precision of the calculation and results. */
12335 static tree
12336 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12338 tree const type = TREE_TYPE (arg);
12339 tree result = NULL_TREE;
12341 STRIP_NOPS (arg);
12343 /* To proceed, MPFR must exactly represent the target floating point
12344 format, which only happens when the target base equals two. */
12345 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12346 && TREE_CODE (arg) == REAL_CST
12347 && !TREE_OVERFLOW (arg))
12349 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12351 if (real_isfinite (ra))
12353 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12354 const int prec = fmt->p;
12355 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12356 tree result_s, result_c;
12357 int inexact;
12358 mpfr_t m, ms, mc;
12360 mpfr_inits2 (prec, m, ms, mc, NULL);
12361 mpfr_from_real (m, ra, GMP_RNDN);
12362 mpfr_clear_flags ();
12363 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12364 result_s = do_mpfr_ckconv (ms, type, inexact);
12365 result_c = do_mpfr_ckconv (mc, type, inexact);
12366 mpfr_clears (m, ms, mc, NULL);
12367 if (result_s && result_c)
12369 /* If we are to return in a complex value do so. */
12370 if (!arg_sinp && !arg_cosp)
12371 return build_complex (build_complex_type (type),
12372 result_c, result_s);
12374 /* Dereference the sin/cos pointer arguments. */
12375 arg_sinp = build_fold_indirect_ref (arg_sinp);
12376 arg_cosp = build_fold_indirect_ref (arg_cosp);
12377 /* Proceed if valid pointer type were passed in. */
12378 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12379 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12381 /* Set the values. */
12382 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12383 result_s);
12384 TREE_SIDE_EFFECTS (result_s) = 1;
12385 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12386 result_c);
12387 TREE_SIDE_EFFECTS (result_c) = 1;
12388 /* Combine the assignments into a compound expr. */
12389 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12390 result_s, result_c));
12395 return result;
12398 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12399 two-argument mpfr order N Bessel function FUNC on them and return
12400 the resulting value as a tree with type TYPE. The mpfr precision
12401 is set to the precision of TYPE. We assume that function FUNC
12402 returns zero if the result could be calculated exactly within the
12403 requested precision. */
12404 static tree
12405 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12406 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12407 const REAL_VALUE_TYPE *min, bool inclusive)
12409 tree result = NULL_TREE;
12411 STRIP_NOPS (arg1);
12412 STRIP_NOPS (arg2);
12414 /* To proceed, MPFR must exactly represent the target floating point
12415 format, which only happens when the target base equals two. */
12416 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12417 && tree_fits_shwi_p (arg1)
12418 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12420 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12421 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12423 if (n == (long)n
12424 && real_isfinite (ra)
12425 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12427 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12428 const int prec = fmt->p;
12429 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12430 int inexact;
12431 mpfr_t m;
12433 mpfr_init2 (m, prec);
12434 mpfr_from_real (m, ra, GMP_RNDN);
12435 mpfr_clear_flags ();
12436 inexact = func (m, n, m, rnd);
12437 result = do_mpfr_ckconv (m, type, inexact);
12438 mpfr_clear (m);
12442 return result;
12445 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12446 the pointer *(ARG_QUO) and return the result. The type is taken
12447 from the type of ARG0 and is used for setting the precision of the
12448 calculation and results. */
12450 static tree
12451 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12453 tree const type = TREE_TYPE (arg0);
12454 tree result = NULL_TREE;
12456 STRIP_NOPS (arg0);
12457 STRIP_NOPS (arg1);
12459 /* To proceed, MPFR must exactly represent the target floating point
12460 format, which only happens when the target base equals two. */
12461 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12462 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12463 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12465 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12466 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12468 if (real_isfinite (ra0) && real_isfinite (ra1))
12470 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12471 const int prec = fmt->p;
12472 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12473 tree result_rem;
12474 long integer_quo;
12475 mpfr_t m0, m1;
12477 mpfr_inits2 (prec, m0, m1, NULL);
12478 mpfr_from_real (m0, ra0, GMP_RNDN);
12479 mpfr_from_real (m1, ra1, GMP_RNDN);
12480 mpfr_clear_flags ();
12481 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12482 /* Remquo is independent of the rounding mode, so pass
12483 inexact=0 to do_mpfr_ckconv(). */
12484 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12485 mpfr_clears (m0, m1, NULL);
12486 if (result_rem)
12488 /* MPFR calculates quo in the host's long so it may
12489 return more bits in quo than the target int can hold
12490 if sizeof(host long) > sizeof(target int). This can
12491 happen even for native compilers in LP64 mode. In
12492 these cases, modulo the quo value with the largest
12493 number that the target int can hold while leaving one
12494 bit for the sign. */
12495 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12496 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12498 /* Dereference the quo pointer argument. */
12499 arg_quo = build_fold_indirect_ref (arg_quo);
12500 /* Proceed iff a valid pointer type was passed in. */
12501 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12503 /* Set the value. */
12504 tree result_quo
12505 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12506 build_int_cst (TREE_TYPE (arg_quo),
12507 integer_quo));
12508 TREE_SIDE_EFFECTS (result_quo) = 1;
12509 /* Combine the quo assignment with the rem. */
12510 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12511 result_quo, result_rem));
12516 return result;
12519 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12520 resulting value as a tree with type TYPE. The mpfr precision is
12521 set to the precision of TYPE. We assume that this mpfr function
12522 returns zero if the result could be calculated exactly within the
12523 requested precision. In addition, the integer pointer represented
12524 by ARG_SG will be dereferenced and set to the appropriate signgam
12525 (-1,1) value. */
12527 static tree
12528 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12530 tree result = NULL_TREE;
12532 STRIP_NOPS (arg);
12534 /* To proceed, MPFR must exactly represent the target floating point
12535 format, which only happens when the target base equals two. Also
12536 verify ARG is a constant and that ARG_SG is an int pointer. */
12537 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12538 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12539 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12540 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12542 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12544 /* In addition to NaN and Inf, the argument cannot be zero or a
12545 negative integer. */
12546 if (real_isfinite (ra)
12547 && ra->cl != rvc_zero
12548 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12550 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12551 const int prec = fmt->p;
12552 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12553 int inexact, sg;
12554 mpfr_t m;
12555 tree result_lg;
12557 mpfr_init2 (m, prec);
12558 mpfr_from_real (m, ra, GMP_RNDN);
12559 mpfr_clear_flags ();
12560 inexact = mpfr_lgamma (m, &sg, m, rnd);
12561 result_lg = do_mpfr_ckconv (m, type, inexact);
12562 mpfr_clear (m);
12563 if (result_lg)
12565 tree result_sg;
12567 /* Dereference the arg_sg pointer argument. */
12568 arg_sg = build_fold_indirect_ref (arg_sg);
12569 /* Assign the signgam value into *arg_sg. */
12570 result_sg = fold_build2 (MODIFY_EXPR,
12571 TREE_TYPE (arg_sg), arg_sg,
12572 build_int_cst (TREE_TYPE (arg_sg), sg));
12573 TREE_SIDE_EFFECTS (result_sg) = 1;
12574 /* Combine the signgam assignment with the lgamma result. */
12575 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12576 result_sg, result_lg));
12581 return result;
12584 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12585 function FUNC on it and return the resulting value as a tree with
12586 type TYPE. The mpfr precision is set to the precision of TYPE. We
12587 assume that function FUNC returns zero if the result could be
12588 calculated exactly within the requested precision. */
12590 static tree
12591 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12593 tree result = NULL_TREE;
12595 STRIP_NOPS (arg);
12597 /* To proceed, MPFR must exactly represent the target floating point
12598 format, which only happens when the target base equals two. */
12599 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12600 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12601 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12603 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12604 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12606 if (real_isfinite (re) && real_isfinite (im))
12608 const struct real_format *const fmt =
12609 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12610 const int prec = fmt->p;
12611 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12612 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12613 int inexact;
12614 mpc_t m;
12616 mpc_init2 (m, prec);
12617 mpfr_from_real (mpc_realref (m), re, rnd);
12618 mpfr_from_real (mpc_imagref (m), im, rnd);
12619 mpfr_clear_flags ();
12620 inexact = func (m, m, crnd);
12621 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
12622 mpc_clear (m);
12626 return result;
12629 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
12630 mpc function FUNC on it and return the resulting value as a tree
12631 with type TYPE. The mpfr precision is set to the precision of
12632 TYPE. We assume that function FUNC returns zero if the result
12633 could be calculated exactly within the requested precision. If
12634 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12635 in the arguments and/or results. */
12637 tree
12638 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12639 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12641 tree result = NULL_TREE;
12643 STRIP_NOPS (arg0);
12644 STRIP_NOPS (arg1);
12646 /* To proceed, MPFR must exactly represent the target floating point
12647 format, which only happens when the target base equals two. */
12648 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12649 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12650 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12651 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12652 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12654 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12655 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12656 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12657 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12659 if (do_nonfinite
12660 || (real_isfinite (re0) && real_isfinite (im0)
12661 && real_isfinite (re1) && real_isfinite (im1)))
12663 const struct real_format *const fmt =
12664 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12665 const int prec = fmt->p;
12666 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12667 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12668 int inexact;
12669 mpc_t m0, m1;
12671 mpc_init2 (m0, prec);
12672 mpc_init2 (m1, prec);
12673 mpfr_from_real (mpc_realref (m0), re0, rnd);
12674 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12675 mpfr_from_real (mpc_realref (m1), re1, rnd);
12676 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12677 mpfr_clear_flags ();
12678 inexact = func (m0, m0, m1, crnd);
12679 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12680 mpc_clear (m0);
12681 mpc_clear (m1);
12685 return result;
12688 /* A wrapper function for builtin folding that prevents warnings for
12689 "statement without effect" and the like, caused by removing the
12690 call node earlier than the warning is generated. */
12692 tree
12693 fold_call_stmt (gimple stmt, bool ignore)
12695 tree ret = NULL_TREE;
12696 tree fndecl = gimple_call_fndecl (stmt);
12697 location_t loc = gimple_location (stmt);
12698 if (fndecl
12699 && TREE_CODE (fndecl) == FUNCTION_DECL
12700 && DECL_BUILT_IN (fndecl)
12701 && !gimple_call_va_arg_pack_p (stmt))
12703 int nargs = gimple_call_num_args (stmt);
12704 tree *args = (nargs > 0
12705 ? gimple_call_arg_ptr (stmt, 0)
12706 : &error_mark_node);
12708 if (avoid_folding_inline_builtin (fndecl))
12709 return NULL_TREE;
12710 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12712 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12714 else
12716 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
12717 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
12718 if (ret)
12720 /* Propagate location information from original call to
12721 expansion of builtin. Otherwise things like
12722 maybe_emit_chk_warning, that operate on the expansion
12723 of a builtin, will use the wrong location information. */
12724 if (gimple_has_location (stmt))
12726 tree realret = ret;
12727 if (TREE_CODE (ret) == NOP_EXPR)
12728 realret = TREE_OPERAND (ret, 0);
12729 if (CAN_HAVE_LOCATION_P (realret)
12730 && !EXPR_HAS_LOCATION (realret))
12731 SET_EXPR_LOCATION (realret, loc);
12732 return realret;
12734 return ret;
12738 return NULL_TREE;
12741 /* Look up the function in builtin_decl that corresponds to DECL
12742 and set ASMSPEC as its user assembler name. DECL must be a
12743 function decl that declares a builtin. */
12745 void
12746 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12748 tree builtin;
12749 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12750 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12751 && asmspec != 0);
12753 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12754 set_user_assembler_name (builtin, asmspec);
12755 switch (DECL_FUNCTION_CODE (decl))
12757 case BUILT_IN_MEMCPY:
12758 init_block_move_fn (asmspec);
12759 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12760 break;
12761 case BUILT_IN_MEMSET:
12762 init_block_clear_fn (asmspec);
12763 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12764 break;
12765 case BUILT_IN_MEMMOVE:
12766 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12767 break;
12768 case BUILT_IN_MEMCMP:
12769 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12770 break;
12771 case BUILT_IN_ABORT:
12772 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12773 break;
12774 case BUILT_IN_FFS:
12775 if (INT_TYPE_SIZE < BITS_PER_WORD)
12777 set_user_assembler_libfunc ("ffs", asmspec);
12778 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12779 MODE_INT, 0), "ffs");
12781 break;
12782 default:
12783 break;
12787 /* Return true if DECL is a builtin that expands to a constant or similarly
12788 simple code. */
12789 bool
12790 is_simple_builtin (tree decl)
12792 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12793 switch (DECL_FUNCTION_CODE (decl))
12795 /* Builtins that expand to constants. */
12796 case BUILT_IN_CONSTANT_P:
12797 case BUILT_IN_EXPECT:
12798 case BUILT_IN_OBJECT_SIZE:
12799 case BUILT_IN_UNREACHABLE:
12800 /* Simple register moves or loads from stack. */
12801 case BUILT_IN_ASSUME_ALIGNED:
12802 case BUILT_IN_RETURN_ADDRESS:
12803 case BUILT_IN_EXTRACT_RETURN_ADDR:
12804 case BUILT_IN_FROB_RETURN_ADDR:
12805 case BUILT_IN_RETURN:
12806 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12807 case BUILT_IN_FRAME_ADDRESS:
12808 case BUILT_IN_VA_END:
12809 case BUILT_IN_STACK_SAVE:
12810 case BUILT_IN_STACK_RESTORE:
12811 /* Exception state returns or moves registers around. */
12812 case BUILT_IN_EH_FILTER:
12813 case BUILT_IN_EH_POINTER:
12814 case BUILT_IN_EH_COPY_VALUES:
12815 return true;
12817 default:
12818 return false;
12821 return false;
12824 /* Return true if DECL is a builtin that is not expensive, i.e., they are
12825 most probably expanded inline into reasonably simple code. This is a
12826 superset of is_simple_builtin. */
12827 bool
12828 is_inexpensive_builtin (tree decl)
12830 if (!decl)
12831 return false;
12832 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12833 return true;
12834 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12835 switch (DECL_FUNCTION_CODE (decl))
12837 case BUILT_IN_ABS:
12838 case BUILT_IN_ALLOCA:
12839 case BUILT_IN_ALLOCA_WITH_ALIGN:
12840 case BUILT_IN_BSWAP16:
12841 case BUILT_IN_BSWAP32:
12842 case BUILT_IN_BSWAP64:
12843 case BUILT_IN_CLZ:
12844 case BUILT_IN_CLZIMAX:
12845 case BUILT_IN_CLZL:
12846 case BUILT_IN_CLZLL:
12847 case BUILT_IN_CTZ:
12848 case BUILT_IN_CTZIMAX:
12849 case BUILT_IN_CTZL:
12850 case BUILT_IN_CTZLL:
12851 case BUILT_IN_FFS:
12852 case BUILT_IN_FFSIMAX:
12853 case BUILT_IN_FFSL:
12854 case BUILT_IN_FFSLL:
12855 case BUILT_IN_IMAXABS:
12856 case BUILT_IN_FINITE:
12857 case BUILT_IN_FINITEF:
12858 case BUILT_IN_FINITEL:
12859 case BUILT_IN_FINITED32:
12860 case BUILT_IN_FINITED64:
12861 case BUILT_IN_FINITED128:
12862 case BUILT_IN_FPCLASSIFY:
12863 case BUILT_IN_ISFINITE:
12864 case BUILT_IN_ISINF_SIGN:
12865 case BUILT_IN_ISINF:
12866 case BUILT_IN_ISINFF:
12867 case BUILT_IN_ISINFL:
12868 case BUILT_IN_ISINFD32:
12869 case BUILT_IN_ISINFD64:
12870 case BUILT_IN_ISINFD128:
12871 case BUILT_IN_ISNAN:
12872 case BUILT_IN_ISNANF:
12873 case BUILT_IN_ISNANL:
12874 case BUILT_IN_ISNAND32:
12875 case BUILT_IN_ISNAND64:
12876 case BUILT_IN_ISNAND128:
12877 case BUILT_IN_ISNORMAL:
12878 case BUILT_IN_ISGREATER:
12879 case BUILT_IN_ISGREATEREQUAL:
12880 case BUILT_IN_ISLESS:
12881 case BUILT_IN_ISLESSEQUAL:
12882 case BUILT_IN_ISLESSGREATER:
12883 case BUILT_IN_ISUNORDERED:
12884 case BUILT_IN_VA_ARG_PACK:
12885 case BUILT_IN_VA_ARG_PACK_LEN:
12886 case BUILT_IN_VA_COPY:
12887 case BUILT_IN_TRAP:
12888 case BUILT_IN_SAVEREGS:
12889 case BUILT_IN_POPCOUNTL:
12890 case BUILT_IN_POPCOUNTLL:
12891 case BUILT_IN_POPCOUNTIMAX:
12892 case BUILT_IN_POPCOUNT:
12893 case BUILT_IN_PARITYL:
12894 case BUILT_IN_PARITYLL:
12895 case BUILT_IN_PARITYIMAX:
12896 case BUILT_IN_PARITY:
12897 case BUILT_IN_LABS:
12898 case BUILT_IN_LLABS:
12899 case BUILT_IN_PREFETCH:
12900 return true;
12902 default:
12903 return is_simple_builtin (decl);
12906 return false;