/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "alias.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "internal-fn.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "gomp-constants.h"

static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_,
   or (with Cilk Plus enabled) names one of the Cilk runtime entry
   points handled specially here.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
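/* Illustrative example (hypothetical numbers, not tied to any target):
   if EXP is known to live at bit address 128*k + 32 (byte address
   16*k + 4), this function would set *ALIGNP to 128 and *BITPOSP to 32,
   since 128 divides (&EXP - 32) and 32 < 128.  */
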
static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */
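  /* Illustrative sketch: if align == 32 and bitpos == 8 (both in bits),
     the object's bit address has the form 32*k + 8, and the largest
     power of two that provably divides it is the lowest set bit of
     bitpos, i.e. bitpos & -bitpos == 8, which is what the reduction
     below computes.  */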
  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
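
/* A usage sketch (hypothetical trees, for illustration only): for SRC
   denoting the constant "hello", c_strlen returns the ssizetype
   constant 5; for "hello" + 2 it returns 3; for "foo\0bar" with a
   non-constant offset it returns NULL_TREE, since the answer would
   depend on where the search starts.  */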

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
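
/* Illustrative sketch (assuming 8-bit units and a 32-bit SImode whose
   size does not exceed the target word): c_readstr ("abcd", SImode)
   would produce 0x61626364 for a big-endian target and 0x64636261 for
   a little-endian one, i.e. the constant that a target load from the
   string would yield.  */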

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */
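  /* Sketch of the resulting buffer layout, as implied by the stores
     below (offsets in units of GET_MODE_SIZE (Pmode)):
       word 0	 frame pointer (targetm.builtin_setjmp_frame_value)
       word 1	 address of RECEIVER_LABEL
       word 2..  machine-dependent stack save area  */
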
  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */
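/* For example, expand_builtin_nonlocal_goto below checks for exactly
   two pointer arguments with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   whereas a trailing 0 instead of VOID_TYPE would accept any further
   arguments after those listed.  */
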
static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */

 end: ;
  va_end (ap);

  return res;
}

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;
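
  /* So, for example, a source-level call __builtin_prefetch (p) is
     handled here exactly like __builtin_prefetch (p, 0, 3)
     (illustrative restatement of the defaulting above).  */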

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
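	    /* The rounding just above pads SIZE up to ALIGN; e.g. with
	       size == 9 and align == 4 it yields CEIL (9, 4) * 4 == 12
	       (illustrative values).  */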
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1574 /* Perform an untyped call and save the state required to perform an
1575 untyped return of whatever value was returned by the given function. */
1577 static rtx
1578 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1580 int size, align, regno;
1581 machine_mode mode;
1582 rtx incoming_args, result, reg, dest, src;
1583 rtx_call_insn *call_insn;
1584 rtx old_stack_level = 0;
1585 rtx call_fusage = 0;
1586 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1588 arguments = convert_memory_address (Pmode, arguments);
1590 /* Create a block where the return registers can be saved. */
1591 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1593 /* Fetch the arg pointer from the ARGUMENTS block. */
1594 incoming_args = gen_reg_rtx (Pmode);
1595 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1596 if (!STACK_GROWS_DOWNWARD)
1597 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1598 incoming_args, 0, OPTAB_LIB_WIDEN);
1600 /* Push a new argument block and copy the arguments. Do not allow
1601 the (potential) memcpy call below to interfere with our stack
1602 manipulations. */
1603 do_pending_stack_adjust ();
1604 NO_DEFER_POP;
1606 /* Save the stack with nonlocal if available. */
1607 if (targetm.have_save_stack_nonlocal ())
1608 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1609 else
1610 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1612 /* Allocate a block of memory onto the stack and copy the memory
1613 arguments to the outgoing arguments address. We can pass TRUE
1614 as the 4th argument because we just saved the stack pointer
1615 and will restore it right after the call. */
1616 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1618 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1619 may have already set current_function_calls_alloca to true.
1620 current_function_calls_alloca won't be set if argsize is zero,
1621 so we have to guarantee need_drap is true here. */
1622 if (SUPPORTS_STACK_ALIGNMENT)
1623 crtl->need_drap = true;
1625 dest = virtual_outgoing_args_rtx;
1626 if (!STACK_GROWS_DOWNWARD)
1628 if (CONST_INT_P (argsize))
1629 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1630 else
1631 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1633 dest = gen_rtx_MEM (BLKmode, dest);
1634 set_mem_align (dest, PARM_BOUNDARY);
1635 src = gen_rtx_MEM (BLKmode, incoming_args);
1636 set_mem_align (src, PARM_BOUNDARY);
1637 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1639 /* Refer to the argument block. */
1640 apply_args_size ();
1641 arguments = gen_rtx_MEM (BLKmode, arguments);
1642 set_mem_align (arguments, PARM_BOUNDARY);
1644 /* Walk past the arg-pointer and structure value address. */
1645 size = GET_MODE_SIZE (Pmode);
1646 if (struct_value)
1647 size += GET_MODE_SIZE (Pmode);
1649 /* Restore each of the registers previously saved. Make USE insns
1650 for each of these registers for use in making the call. */
1651 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1652 if ((mode = apply_args_mode[regno]) != VOIDmode)
1654 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1655 if (size % align != 0)
1656 size = CEIL (size, align) * align;
1657 reg = gen_rtx_REG (mode, regno);
1658 emit_move_insn (reg, adjust_address (arguments, mode, size));
1659 use_reg (&call_fusage, reg);
1660 size += GET_MODE_SIZE (mode);
1663 /* Restore the structure value address unless this is passed as an
1664 "invisible" first argument. */
1665 size = GET_MODE_SIZE (Pmode);
1666 if (struct_value)
1668 rtx value = gen_reg_rtx (Pmode);
1669 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1670 emit_move_insn (struct_value, value);
1671 if (REG_P (struct_value))
1672 use_reg (&call_fusage, struct_value);
1673 size += GET_MODE_SIZE (Pmode);
1676 /* All arguments and registers used for the call are set up by now! */
1677 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1679 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1680 needs to be done, and we don't want to load it into a register as an
1681 optimization, because prepare_call_address already did it if needed. */
1682 if (GET_CODE (function) != SYMBOL_REF)
1683 function = memory_address (FUNCTION_MODE, function);
1685 /* Generate the actual call instruction and save the return value. */
1686 if (targetm.have_untyped_call ())
1688 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1689 emit_call_insn (targetm.gen_untyped_call (mem, result,
1690 result_vector (1, result)));
1692 else
1693 #ifdef HAVE_call_value
1694 if (HAVE_call_value)
1696 rtx valreg = 0;
1698 /* Locate the unique return register. It is not possible to
1699 express a call that sets more than one return register using
1700 call_value; use untyped_call for that. In fact, untyped_call
1701 only needs to save the return registers in the given block. */
1702 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1703 if ((mode = apply_result_mode[regno]) != VOIDmode)
1705 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1707 valreg = gen_rtx_REG (mode, regno);
1710 emit_call_insn (GEN_CALL_VALUE (valreg,
1711 gen_rtx_MEM (FUNCTION_MODE, function),
1712 const0_rtx, NULL_RTX, const0_rtx));
1714 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1716 else
1717 #endif
1718 gcc_unreachable ();
1720 /* Find the CALL insn we just emitted, and attach the register usage
1721 information. */
1722 call_insn = last_call_insn ();
1723 add_function_usage_to (call_insn, call_fusage);
1725 /* Restore the stack. */
1726 if (targetm.have_save_stack_nonlocal ())
1727 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1728 else
1729 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1730 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1732 OK_DEFER_POP;
1734 /* Return the address of the result block. */
1735 result = copy_addr_to_reg (XEXP (result, 0));
1736 return convert_memory_address (ptr_mode, result);
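/* Illustrative usage sketch (not part of the original source): this
   expansion backs the __builtin_apply GCC extension, e.g.

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*) ()) fn, args, 64);

   where FN and the argument-block size 64 are placeholders chosen by
   the caller.  */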
1739 /* Perform an untyped return. */
1741 static void
1742 expand_builtin_return (rtx result)
1744 int size, align, regno;
1745 machine_mode mode;
1746 rtx reg;
1747 rtx_insn *call_fusage = 0;
1749 result = convert_memory_address (Pmode, result);
1751 apply_result_size ();
1752 result = gen_rtx_MEM (BLKmode, result);
1754 if (targetm.have_untyped_return ())
1756 rtx vector = result_vector (0, result);
1757 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1758 emit_barrier ();
1759 return;
1762 /* Restore the return value and note that each value is used. */
1763 size = 0;
1764 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1765 if ((mode = apply_result_mode[regno]) != VOIDmode)
1767 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1768 if (size % align != 0)
1769 size = CEIL (size, align) * align;
1770 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1771 emit_move_insn (reg, adjust_address (result, mode, size));
1773 push_to_sequence (call_fusage);
1774 emit_use (reg);
1775 call_fusage = get_insns ();
1776 end_sequence ();
1777 size += GET_MODE_SIZE (mode);
1780 /* Put the USE insns before the return. */
1781 emit_insn (call_fusage);
1783 /* Return whatever values were restored by jumping directly to the end
1784 of the function. */
1785 expand_naked_return ();
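/* Illustrative sketch (not in the original source): together with
   __builtin_apply above, a complete argument-forwarding wrapper is

     void wrapper ()
     {
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (res);
     }

   TARGET_FN and the size 64 are hypothetical placeholders.  */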
1788 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1790 static enum type_class
1791 type_to_class (tree type)
1793 switch (TREE_CODE (type))
1795 case VOID_TYPE: return void_type_class;
1796 case INTEGER_TYPE: return integer_type_class;
1797 case ENUMERAL_TYPE: return enumeral_type_class;
1798 case BOOLEAN_TYPE: return boolean_type_class;
1799 case POINTER_TYPE: return pointer_type_class;
1800 case REFERENCE_TYPE: return reference_type_class;
1801 case OFFSET_TYPE: return offset_type_class;
1802 case REAL_TYPE: return real_type_class;
1803 case COMPLEX_TYPE: return complex_type_class;
1804 case FUNCTION_TYPE: return function_type_class;
1805 case METHOD_TYPE: return method_type_class;
1806 case RECORD_TYPE: return record_type_class;
1807 case UNION_TYPE:
1808 case QUAL_UNION_TYPE: return union_type_class;
1809 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1810 ? string_type_class : array_type_class);
1811 case LANG_TYPE: return lang_type_class;
1812 default: return no_type_class;
1816 /* Expand a call EXP to __builtin_classify_type. */
1818 static rtx
1819 expand_builtin_classify_type (tree exp)
1821 if (call_expr_nargs (exp))
1822 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1823 return GEN_INT (no_type_class);
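/* Example (illustrative, not in the source): at the C level,

     __builtin_classify_type (1.5)        evaluates to real_type_class
     __builtin_classify_type ((void *) 0) evaluates to pointer_type_class

   and, per the code above, a call without arguments yields
   no_type_class.  */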
1826 /* This helper macro, meant to be used in mathfn_built_in below,
1827 determines which among a set of three builtin math functions is
1828 appropriate for a given type mode. The `F' and `L' cases are
1829 automatically generated from the `double' case. */
1830 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1831 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1832 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1833 fcodel = BUILT_IN_MATHFN##L ; break;
1834 /* Similar to above, but appends _R after any F/L suffix. */
1835 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1836 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1837 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1838 fcodel = BUILT_IN_MATHFN##L_R ; break;
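/* For example, CASE_MATHFN (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   covering sin, sinf and sinl with a single macro use.  */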
1840 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1841 if available. If IMPLICIT is true use the implicit builtin declaration,
1842 otherwise use the explicit declaration. If we can't do the conversion,
1843 return zero. */
1845 static tree
1846 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1848 enum built_in_function fcode, fcodef, fcodel, fcode2;
1850 switch (fn)
1852 CASE_MATHFN (BUILT_IN_ACOS)
1853 CASE_MATHFN (BUILT_IN_ACOSH)
1854 CASE_MATHFN (BUILT_IN_ASIN)
1855 CASE_MATHFN (BUILT_IN_ASINH)
1856 CASE_MATHFN (BUILT_IN_ATAN)
1857 CASE_MATHFN (BUILT_IN_ATAN2)
1858 CASE_MATHFN (BUILT_IN_ATANH)
1859 CASE_MATHFN (BUILT_IN_CBRT)
1860 CASE_MATHFN (BUILT_IN_CEIL)
1861 CASE_MATHFN (BUILT_IN_CEXPI)
1862 CASE_MATHFN (BUILT_IN_COPYSIGN)
1863 CASE_MATHFN (BUILT_IN_COS)
1864 CASE_MATHFN (BUILT_IN_COSH)
1865 CASE_MATHFN (BUILT_IN_DREM)
1866 CASE_MATHFN (BUILT_IN_ERF)
1867 CASE_MATHFN (BUILT_IN_ERFC)
1868 CASE_MATHFN (BUILT_IN_EXP)
1869 CASE_MATHFN (BUILT_IN_EXP10)
1870 CASE_MATHFN (BUILT_IN_EXP2)
1871 CASE_MATHFN (BUILT_IN_EXPM1)
1872 CASE_MATHFN (BUILT_IN_FABS)
1873 CASE_MATHFN (BUILT_IN_FDIM)
1874 CASE_MATHFN (BUILT_IN_FLOOR)
1875 CASE_MATHFN (BUILT_IN_FMA)
1876 CASE_MATHFN (BUILT_IN_FMAX)
1877 CASE_MATHFN (BUILT_IN_FMIN)
1878 CASE_MATHFN (BUILT_IN_FMOD)
1879 CASE_MATHFN (BUILT_IN_FREXP)
1880 CASE_MATHFN (BUILT_IN_GAMMA)
1881 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1882 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1883 CASE_MATHFN (BUILT_IN_HYPOT)
1884 CASE_MATHFN (BUILT_IN_ILOGB)
1885 CASE_MATHFN (BUILT_IN_ICEIL)
1886 CASE_MATHFN (BUILT_IN_IFLOOR)
1887 CASE_MATHFN (BUILT_IN_INF)
1888 CASE_MATHFN (BUILT_IN_IRINT)
1889 CASE_MATHFN (BUILT_IN_IROUND)
1890 CASE_MATHFN (BUILT_IN_ISINF)
1891 CASE_MATHFN (BUILT_IN_J0)
1892 CASE_MATHFN (BUILT_IN_J1)
1893 CASE_MATHFN (BUILT_IN_JN)
1894 CASE_MATHFN (BUILT_IN_LCEIL)
1895 CASE_MATHFN (BUILT_IN_LDEXP)
1896 CASE_MATHFN (BUILT_IN_LFLOOR)
1897 CASE_MATHFN (BUILT_IN_LGAMMA)
1898 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1899 CASE_MATHFN (BUILT_IN_LLCEIL)
1900 CASE_MATHFN (BUILT_IN_LLFLOOR)
1901 CASE_MATHFN (BUILT_IN_LLRINT)
1902 CASE_MATHFN (BUILT_IN_LLROUND)
1903 CASE_MATHFN (BUILT_IN_LOG)
1904 CASE_MATHFN (BUILT_IN_LOG10)
1905 CASE_MATHFN (BUILT_IN_LOG1P)
1906 CASE_MATHFN (BUILT_IN_LOG2)
1907 CASE_MATHFN (BUILT_IN_LOGB)
1908 CASE_MATHFN (BUILT_IN_LRINT)
1909 CASE_MATHFN (BUILT_IN_LROUND)
1910 CASE_MATHFN (BUILT_IN_MODF)
1911 CASE_MATHFN (BUILT_IN_NAN)
1912 CASE_MATHFN (BUILT_IN_NANS)
1913 CASE_MATHFN (BUILT_IN_NEARBYINT)
1914 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1915 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1916 CASE_MATHFN (BUILT_IN_POW)
1917 CASE_MATHFN (BUILT_IN_POWI)
1918 CASE_MATHFN (BUILT_IN_POW10)
1919 CASE_MATHFN (BUILT_IN_REMAINDER)
1920 CASE_MATHFN (BUILT_IN_REMQUO)
1921 CASE_MATHFN (BUILT_IN_RINT)
1922 CASE_MATHFN (BUILT_IN_ROUND)
1923 CASE_MATHFN (BUILT_IN_SCALB)
1924 CASE_MATHFN (BUILT_IN_SCALBLN)
1925 CASE_MATHFN (BUILT_IN_SCALBN)
1926 CASE_MATHFN (BUILT_IN_SIGNBIT)
1927 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1928 CASE_MATHFN (BUILT_IN_SIN)
1929 CASE_MATHFN (BUILT_IN_SINCOS)
1930 CASE_MATHFN (BUILT_IN_SINH)
1931 CASE_MATHFN (BUILT_IN_SQRT)
1932 CASE_MATHFN (BUILT_IN_TAN)
1933 CASE_MATHFN (BUILT_IN_TANH)
1934 CASE_MATHFN (BUILT_IN_TGAMMA)
1935 CASE_MATHFN (BUILT_IN_TRUNC)
1936 CASE_MATHFN (BUILT_IN_Y0)
1937 CASE_MATHFN (BUILT_IN_Y1)
1938 CASE_MATHFN (BUILT_IN_YN)
1940 default:
1941 return NULL_TREE;
1944 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1945 fcode2 = fcode;
1946 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1947 fcode2 = fcodef;
1948 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1949 fcode2 = fcodel;
1950 else
1951 return NULL_TREE;
1953 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1954 return NULL_TREE;
1956 return builtin_decl_explicit (fcode2);
1959 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1961 tree
1962 mathfn_built_in (tree type, enum built_in_function fn)
1964 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
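/* Usage sketch (illustrative): to obtain the float variant of sqrt,

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   yields the declaration of BUILT_IN_SQRTF, or NULL_TREE when no
   implicit declaration is available.  */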
1967 /* If errno must be maintained, expand the RTL to check if the result,
1968 TARGET, of a built-in function call, EXP, is NaN, and if so set
1969 errno to EDOM. */
1971 static void
1972 expand_errno_check (tree exp, rtx target)
1974 rtx_code_label *lab = gen_label_rtx ();
1976 /* Test the result; if it is NaN, set errno=EDOM because
1977 the argument was not in the domain. */
1978 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1979 NULL_RTX, NULL, lab,
1980 /* The jump is very likely. */
1981 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1983 #ifdef TARGET_EDOM
1984 /* If this built-in doesn't throw an exception, set errno directly. */
1985 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1987 #ifdef GEN_ERRNO_RTX
1988 rtx errno_rtx = GEN_ERRNO_RTX;
1989 #else
1990 rtx errno_rtx
1991 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1992 #endif
1993 emit_move_insn (errno_rtx,
1994 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1995 emit_label (lab);
1996 return;
1998 #endif
2000 /* Make sure the library call isn't expanded as a tail call. */
2001 CALL_EXPR_TAILCALL (exp) = 0;
2003 /* We can't set errno=EDOM directly; let the library call do it.
2004 Pop the arguments right away in case the call gets deleted. */
2005 NO_DEFER_POP;
2006 expand_call (exp, target, 0);
2007 OK_DEFER_POP;
2008 emit_label (lab);
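/* The check emitted above behaves like this C sketch (illustrative
   only); the self-comparison is false exactly when RESULT is NaN:

     if (result == result)
       goto lab;
     errno = EDOM;
    lab:;  */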
2011 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2012 Return NULL_RTX if a normal call should be emitted rather than expanding
2013 the function in-line. EXP is the expression that is a call to the builtin
2014 function; if convenient, the result should be placed in TARGET.
2015 SUBTARGET may be used as the target for computing one of EXP's operands. */
2017 static rtx
2018 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2020 optab builtin_optab;
2021 rtx op0;
2022 rtx_insn *insns;
2023 tree fndecl = get_callee_fndecl (exp);
2024 machine_mode mode;
2025 bool errno_set = false;
2026 bool try_widening = false;
2027 tree arg;
2029 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2030 return NULL_RTX;
2032 arg = CALL_EXPR_ARG (exp, 0);
2034 switch (DECL_FUNCTION_CODE (fndecl))
2036 CASE_FLT_FN (BUILT_IN_SQRT):
2037 errno_set = ! tree_expr_nonnegative_p (arg);
2038 try_widening = true;
2039 builtin_optab = sqrt_optab;
2040 break;
2041 CASE_FLT_FN (BUILT_IN_EXP):
2042 errno_set = true; builtin_optab = exp_optab; break;
2043 CASE_FLT_FN (BUILT_IN_EXP10):
2044 CASE_FLT_FN (BUILT_IN_POW10):
2045 errno_set = true; builtin_optab = exp10_optab; break;
2046 CASE_FLT_FN (BUILT_IN_EXP2):
2047 errno_set = true; builtin_optab = exp2_optab; break;
2048 CASE_FLT_FN (BUILT_IN_EXPM1):
2049 errno_set = true; builtin_optab = expm1_optab; break;
2050 CASE_FLT_FN (BUILT_IN_LOGB):
2051 errno_set = true; builtin_optab = logb_optab; break;
2052 CASE_FLT_FN (BUILT_IN_LOG):
2053 errno_set = true; builtin_optab = log_optab; break;
2054 CASE_FLT_FN (BUILT_IN_LOG10):
2055 errno_set = true; builtin_optab = log10_optab; break;
2056 CASE_FLT_FN (BUILT_IN_LOG2):
2057 errno_set = true; builtin_optab = log2_optab; break;
2058 CASE_FLT_FN (BUILT_IN_LOG1P):
2059 errno_set = true; builtin_optab = log1p_optab; break;
2060 CASE_FLT_FN (BUILT_IN_ASIN):
2061 builtin_optab = asin_optab; break;
2062 CASE_FLT_FN (BUILT_IN_ACOS):
2063 builtin_optab = acos_optab; break;
2064 CASE_FLT_FN (BUILT_IN_TAN):
2065 builtin_optab = tan_optab; break;
2066 CASE_FLT_FN (BUILT_IN_ATAN):
2067 builtin_optab = atan_optab; break;
2068 CASE_FLT_FN (BUILT_IN_FLOOR):
2069 builtin_optab = floor_optab; break;
2070 CASE_FLT_FN (BUILT_IN_CEIL):
2071 builtin_optab = ceil_optab; break;
2072 CASE_FLT_FN (BUILT_IN_TRUNC):
2073 builtin_optab = btrunc_optab; break;
2074 CASE_FLT_FN (BUILT_IN_ROUND):
2075 builtin_optab = round_optab; break;
2076 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2077 builtin_optab = nearbyint_optab;
2078 if (flag_trapping_math)
2079 break;
2080 /* Else fallthrough and expand as rint. */
2081 CASE_FLT_FN (BUILT_IN_RINT):
2082 builtin_optab = rint_optab; break;
2083 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2084 builtin_optab = significand_optab; break;
2085 default:
2086 gcc_unreachable ();
2089 /* Make a suitable register to place result in. */
2090 mode = TYPE_MODE (TREE_TYPE (exp));
2092 if (! flag_errno_math || ! HONOR_NANS (mode))
2093 errno_set = false;
2095 /* Before working hard, check whether the instruction is available, but try
2096 to widen the mode for specific operations. */
2097 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2098 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2099 && (!errno_set || !optimize_insn_for_size_p ()))
2101 rtx result = gen_reg_rtx (mode);
2103 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2104 need to expand the argument again. This way, we will not perform
2105 side-effects more than once. */
2106 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2108 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2110 start_sequence ();
2112 /* Compute into RESULT.
2113 Set RESULT to wherever the result comes back. */
2114 result = expand_unop (mode, builtin_optab, op0, result, 0);
2116 if (result != 0)
2118 if (errno_set)
2119 expand_errno_check (exp, result);
2121 /* Output the entire sequence. */
2122 insns = get_insns ();
2123 end_sequence ();
2124 emit_insn (insns);
2125 return result;
2128 /* If we were unable to expand via the builtin, stop the sequence
2129 (without outputting the insns) and call to the library function
2130 with the stabilized argument list. */
2131 end_sequence ();
2134 return expand_call (exp, target, target == const0_rtx);
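/* Illustrative example: on a target providing a sqrt<mode>2 insn,
   y = __builtin_sqrt (x) expands to that insn directly, and the errno
   check above is emitted only when X cannot be proved nonnegative.  */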
2137 /* Expand a call to the builtin binary math functions (pow and atan2).
2138 Return NULL_RTX if a normal call should be emitted rather than expanding the
2139 function in-line. EXP is the expression that is a call to the builtin
2140 function; if convenient, the result should be placed in TARGET.
2141 SUBTARGET may be used as the target for computing one of EXP's
2142 operands. */
2144 static rtx
2145 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2147 optab builtin_optab;
2148 rtx op0, op1, result;
2149 rtx_insn *insns;
2150 int op1_type = REAL_TYPE;
2151 tree fndecl = get_callee_fndecl (exp);
2152 tree arg0, arg1;
2153 machine_mode mode;
2154 bool errno_set = true;
2156 switch (DECL_FUNCTION_CODE (fndecl))
2158 CASE_FLT_FN (BUILT_IN_SCALBN):
2159 CASE_FLT_FN (BUILT_IN_SCALBLN):
2160 CASE_FLT_FN (BUILT_IN_LDEXP):
2161 op1_type = INTEGER_TYPE;
2162 default:
2163 break;
2166 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2167 return NULL_RTX;
2169 arg0 = CALL_EXPR_ARG (exp, 0);
2170 arg1 = CALL_EXPR_ARG (exp, 1);
2172 switch (DECL_FUNCTION_CODE (fndecl))
2174 CASE_FLT_FN (BUILT_IN_POW):
2175 builtin_optab = pow_optab; break;
2176 CASE_FLT_FN (BUILT_IN_ATAN2):
2177 builtin_optab = atan2_optab; break;
2178 CASE_FLT_FN (BUILT_IN_SCALB):
2179 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2180 return 0;
2181 builtin_optab = scalb_optab; break;
2182 CASE_FLT_FN (BUILT_IN_SCALBN):
2183 CASE_FLT_FN (BUILT_IN_SCALBLN):
2184 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2185 return 0;
2186 /* Fall through... */
2187 CASE_FLT_FN (BUILT_IN_LDEXP):
2188 builtin_optab = ldexp_optab; break;
2189 CASE_FLT_FN (BUILT_IN_FMOD):
2190 builtin_optab = fmod_optab; break;
2191 CASE_FLT_FN (BUILT_IN_REMAINDER):
2192 CASE_FLT_FN (BUILT_IN_DREM):
2193 builtin_optab = remainder_optab; break;
2194 default:
2195 gcc_unreachable ();
2198 /* Make a suitable register to place result in. */
2199 mode = TYPE_MODE (TREE_TYPE (exp));
2201 /* Before working hard, check whether the instruction is available. */
2202 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2203 return NULL_RTX;
2205 result = gen_reg_rtx (mode);
2207 if (! flag_errno_math || ! HONOR_NANS (mode))
2208 errno_set = false;
2210 if (errno_set && optimize_insn_for_size_p ())
2211 return 0;
2213 /* Always stabilize the argument list. */
2214 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2215 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2217 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2218 op1 = expand_normal (arg1);
2220 start_sequence ();
2222 /* Compute into RESULT.
2223 Set RESULT to wherever the result comes back. */
2224 result = expand_binop (mode, builtin_optab, op0, op1,
2225 result, 0, OPTAB_DIRECT);
2227 /* If we were unable to expand via the builtin, stop the sequence
2228 (without outputting the insns) and call to the library function
2229 with the stabilized argument list. */
2230 if (result == 0)
2232 end_sequence ();
2233 return expand_call (exp, target, target == const0_rtx);
2236 if (errno_set)
2237 expand_errno_check (exp, result);
2239 /* Output the entire sequence. */
2240 insns = get_insns ();
2241 end_sequence ();
2242 emit_insn (insns);
2244 return result;
2247 /* Expand a call to the builtin trinary math functions (fma).
2248 Return NULL_RTX if a normal call should be emitted rather than expanding the
2249 function in-line. EXP is the expression that is a call to the builtin
2250 function; if convenient, the result should be placed in TARGET.
2251 SUBTARGET may be used as the target for computing one of EXP's
2252 operands. */
2254 static rtx
2255 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2257 optab builtin_optab;
2258 rtx op0, op1, op2, result;
2259 rtx_insn *insns;
2260 tree fndecl = get_callee_fndecl (exp);
2261 tree arg0, arg1, arg2;
2262 machine_mode mode;
2264 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2265 return NULL_RTX;
2267 arg0 = CALL_EXPR_ARG (exp, 0);
2268 arg1 = CALL_EXPR_ARG (exp, 1);
2269 arg2 = CALL_EXPR_ARG (exp, 2);
2271 switch (DECL_FUNCTION_CODE (fndecl))
2273 CASE_FLT_FN (BUILT_IN_FMA):
2274 builtin_optab = fma_optab; break;
2275 default:
2276 gcc_unreachable ();
2279 /* Make a suitable register to place result in. */
2280 mode = TYPE_MODE (TREE_TYPE (exp));
2282 /* Before working hard, check whether the instruction is available. */
2283 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2284 return NULL_RTX;
2286 result = gen_reg_rtx (mode);
2288 /* Always stabilize the argument list. */
2289 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2290 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2291 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2293 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2294 op1 = expand_normal (arg1);
2295 op2 = expand_normal (arg2);
2297 start_sequence ();
2299 /* Compute into RESULT.
2300 Set RESULT to wherever the result comes back. */
2301 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2302 result, 0);
2304 /* If we were unable to expand via the builtin, stop the sequence
2305 (without outputting the insns) and call to the library function
2306 with the stabilized argument list. */
2307 if (result == 0)
2309 end_sequence ();
2310 return expand_call (exp, target, target == const0_rtx);
2313 /* Output the entire sequence. */
2314 insns = get_insns ();
2315 end_sequence ();
2316 emit_insn (insns);
2318 return result;
2321 /* Expand a call to the builtin sin and cos math functions.
2322 Return NULL_RTX if a normal call should be emitted rather than expanding the
2323 function in-line. EXP is the expression that is a call to the builtin
2324 function; if convenient, the result should be placed in TARGET.
2325 SUBTARGET may be used as the target for computing one of EXP's
2326 operands. */
2328 static rtx
2329 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2331 optab builtin_optab;
2332 rtx op0;
2333 rtx_insn *insns;
2334 tree fndecl = get_callee_fndecl (exp);
2335 machine_mode mode;
2336 tree arg;
2338 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2339 return NULL_RTX;
2341 arg = CALL_EXPR_ARG (exp, 0);
2343 switch (DECL_FUNCTION_CODE (fndecl))
2345 CASE_FLT_FN (BUILT_IN_SIN):
2346 CASE_FLT_FN (BUILT_IN_COS):
2347 builtin_optab = sincos_optab; break;
2348 default:
2349 gcc_unreachable ();
2352 /* Make a suitable register to place result in. */
2353 mode = TYPE_MODE (TREE_TYPE (exp));
2355 /* Check if the sincos insn is available; otherwise fall back
2356 to the sin or cos insn. */
2357 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2358 switch (DECL_FUNCTION_CODE (fndecl))
2360 CASE_FLT_FN (BUILT_IN_SIN):
2361 builtin_optab = sin_optab; break;
2362 CASE_FLT_FN (BUILT_IN_COS):
2363 builtin_optab = cos_optab; break;
2364 default:
2365 gcc_unreachable ();
2368 /* Before working hard, check whether the instruction is available. */
2369 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2371 rtx result = gen_reg_rtx (mode);
2373 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2374 need to expand the argument again. This way, we will not perform
2375 side-effects more than once. */
2376 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2378 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2380 start_sequence ();
2382 /* Compute into RESULT.
2383 Set RESULT to wherever the result comes back. */
2384 if (builtin_optab == sincos_optab)
2386 int ok;
2388 switch (DECL_FUNCTION_CODE (fndecl))
2390 CASE_FLT_FN (BUILT_IN_SIN):
2391 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2392 break;
2393 CASE_FLT_FN (BUILT_IN_COS):
2394 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2395 break;
2396 default:
2397 gcc_unreachable ();
2399 gcc_assert (ok);
2401 else
2402 result = expand_unop (mode, builtin_optab, op0, result, 0);
2404 if (result != 0)
2406 /* Output the entire sequence. */
2407 insns = get_insns ();
2408 end_sequence ();
2409 emit_insn (insns);
2410 return result;
2413 /* If we were unable to expand via the builtin, stop the sequence
2414 (without outputting the insns) and call to the library function
2415 with the stabilized argument list. */
2416 end_sequence ();
2419 return expand_call (exp, target, target == const0_rtx);
2422 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2423 return an RTL instruction code that implements the functionality.
2424 If that isn't possible or available return CODE_FOR_nothing. */
2426 static enum insn_code
2427 interclass_mathfn_icode (tree arg, tree fndecl)
2429 bool errno_set = false;
2430 optab builtin_optab = unknown_optab;
2431 machine_mode mode;
2433 switch (DECL_FUNCTION_CODE (fndecl))
2435 CASE_FLT_FN (BUILT_IN_ILOGB):
2436 errno_set = true; builtin_optab = ilogb_optab; break;
2437 CASE_FLT_FN (BUILT_IN_ISINF):
2438 builtin_optab = isinf_optab; break;
2439 case BUILT_IN_ISNORMAL:
2440 case BUILT_IN_ISFINITE:
2441 CASE_FLT_FN (BUILT_IN_FINITE):
2442 case BUILT_IN_FINITED32:
2443 case BUILT_IN_FINITED64:
2444 case BUILT_IN_FINITED128:
2445 case BUILT_IN_ISINFD32:
2446 case BUILT_IN_ISINFD64:
2447 case BUILT_IN_ISINFD128:
2448 /* These builtins have no optabs (yet). */
2449 break;
2450 default:
2451 gcc_unreachable ();
2454 /* There's no easy way to detect the case we need to set EDOM. */
2455 if (flag_errno_math && errno_set)
2456 return CODE_FOR_nothing;
2458 /* Optab mode depends on the mode of the input argument. */
2459 mode = TYPE_MODE (TREE_TYPE (arg));
2461 if (builtin_optab)
2462 return optab_handler (builtin_optab, mode);
2463 return CODE_FOR_nothing;
2466 /* Expand a call to one of the builtin math functions that operate on
2467 a floating point argument and output an integer result (ilogb, isinf,
2468 isnan, etc).
2469 Return 0 if a normal call should be emitted rather than expanding the
2470 function in-line. EXP is the expression that is a call to the builtin
2471 function; if convenient, the result should be placed in TARGET. */
2473 static rtx
2474 expand_builtin_interclass_mathfn (tree exp, rtx target)
2476 enum insn_code icode = CODE_FOR_nothing;
2477 rtx op0;
2478 tree fndecl = get_callee_fndecl (exp);
2479 machine_mode mode;
2480 tree arg;
2482 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2483 return NULL_RTX;
2485 arg = CALL_EXPR_ARG (exp, 0);
2486 icode = interclass_mathfn_icode (arg, fndecl);
2487 mode = TYPE_MODE (TREE_TYPE (arg));
2489 if (icode != CODE_FOR_nothing)
2491 struct expand_operand ops[1];
2492 rtx_insn *last = get_last_insn ();
2493 tree orig_arg = arg;
2495 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2496 need to expand the argument again. This way, we will not perform
2497 side-effects more than once. */
2498 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2500 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2502 if (mode != GET_MODE (op0))
2503 op0 = convert_to_mode (mode, op0, 0);
2505 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2506 if (maybe_legitimize_operands (icode, 0, 1, ops)
2507 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2508 return ops[0].value;
2510 delete_insns_since (last);
2511 CALL_EXPR_ARG (exp, 0) = orig_arg;
2514 return NULL_RTX;
2517 /* Expand a call to the builtin sincos math function.
2518 Return NULL_RTX if a normal call should be emitted rather than expanding the
2519 function in-line. EXP is the expression that is a call to the builtin
2520 function. */
2522 static rtx
2523 expand_builtin_sincos (tree exp)
2525 rtx op0, op1, op2, target1, target2;
2526 machine_mode mode;
2527 tree arg, sinp, cosp;
2528 int result;
2529 location_t loc = EXPR_LOCATION (exp);
2530 tree alias_type, alias_off;
2532 if (!validate_arglist (exp, REAL_TYPE,
2533 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2534 return NULL_RTX;
2536 arg = CALL_EXPR_ARG (exp, 0);
2537 sinp = CALL_EXPR_ARG (exp, 1);
2538 cosp = CALL_EXPR_ARG (exp, 2);
2540 /* Make a suitable register to place result in. */
2541 mode = TYPE_MODE (TREE_TYPE (arg));
2543 /* Check if sincos insn is available, otherwise emit the call. */
2544 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2545 return NULL_RTX;
2547 target1 = gen_reg_rtx (mode);
2548 target2 = gen_reg_rtx (mode);
2550 op0 = expand_normal (arg);
2551 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2552 alias_off = build_int_cst (alias_type, 0);
2553 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2554 sinp, alias_off));
2555 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2556 cosp, alias_off));
2558 /* Compute into target1 and target2.
2559 Set TARGET to wherever the result comes back. */
2560 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2561 gcc_assert (result);
2563 /* Move target1 and target2 to the memory locations indicated
2564 by op1 and op2. */
2565 emit_move_insn (op1, target1);
2566 emit_move_insn (op2, target2);
2568 return const0_rtx;
2571 /* Expand a call to the internal cexpi builtin to the sincos math function.
2572 EXP is the expression that is a call to the builtin function; if convenient,
2573 the result should be placed in TARGET. */
2575 static rtx
2576 expand_builtin_cexpi (tree exp, rtx target)
2578 tree fndecl = get_callee_fndecl (exp);
2579 tree arg, type;
2580 machine_mode mode;
2581 rtx op0, op1, op2;
2582 location_t loc = EXPR_LOCATION (exp);
2584 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2585 return NULL_RTX;
2587 arg = CALL_EXPR_ARG (exp, 0);
2588 type = TREE_TYPE (arg);
2589 mode = TYPE_MODE (TREE_TYPE (arg));
2591 /* Try expanding via a sincos optab, falling back to emitting a libcall
2592 to sincos or cexp. We can rely on one of those being available, since
2593 cexpi is only generated from sincos or cexp, or when we have either of them. */
2594 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2596 op1 = gen_reg_rtx (mode);
2597 op2 = gen_reg_rtx (mode);
2599 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2601 /* Compute into op1 and op2. */
2602 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2604 else if (targetm.libc_has_function (function_sincos))
2606 tree call, fn = NULL_TREE;
2607 tree top1, top2;
2608 rtx op1a, op2a;
2610 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2611 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2612 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2613 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2614 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2615 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2616 else
2617 gcc_unreachable ();
2619 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2620 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2621 op1a = copy_addr_to_reg (XEXP (op1, 0));
2622 op2a = copy_addr_to_reg (XEXP (op2, 0));
2623 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2624 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2626 /* Make sure not to fold the sincos call again. */
2627 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2628 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2629 call, 3, arg, top1, top2));
2631 else
2633 tree call, fn = NULL_TREE, narg;
2634 tree ctype = build_complex_type (type);
2636 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2637 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2638 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2639 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2640 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2641 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2642 else
2643 gcc_unreachable ();
2645 /* If we don't have a decl for cexp, create one. This is the
2646 friendliest fallback if the user calls __builtin_cexpi
2647 on a target without full C99 function support. */
2648 if (fn == NULL_TREE)
2650 tree fntype;
2651 const char *name = NULL;
2653 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2654 name = "cexpf";
2655 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2656 name = "cexp";
2657 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2658 name = "cexpl";
2660 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2661 fn = build_fn_decl (name, fntype);
2664 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2665 build_real (type, dconst0), arg);
2667 /* Make sure not to fold the cexp call again. */
2668 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2669 return expand_expr (build_call_nary (ctype, call, 1, narg),
2670 target, VOIDmode, EXPAND_NORMAL);
2673 /* Now build the proper return type. */
2674 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2675 make_tree (TREE_TYPE (arg), op2),
2676 make_tree (TREE_TYPE (arg), op1)),
2677 target, VOIDmode, EXPAND_NORMAL);
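/* In effect the expansion above computes Euler's formula; as a sketch
   (not part of the source),

     __builtin_cexpi (x) == cos (x) + 1i * sin (x)

   with op2 holding the real (cos) part and op1 the imaginary (sin)
   part.  */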
2680 /* Conveniently construct a function call expression. FNDECL names the
2681 function to be called, N is the number of arguments, and the "..."
2682 parameters are the argument expressions. Unlike build_call_expr,
2683 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2685 static tree
2686 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2688 va_list ap;
2689 tree fntype = TREE_TYPE (fndecl);
2690 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2692 va_start (ap, n);
2693 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2694 va_end (ap);
2695 SET_EXPR_LOCATION (fn, loc);
2696 return fn;
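/* Usage sketch, mirroring calls later in this file:

     tree call = build_call_nofold_loc (loc, fn, 2, dst, src);

   builds the unfolded CALL_EXPR fn (dst, src) at location LOC.  */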
2699 /* Expand a call to one of the builtin rounding functions gcc defines
2700 as an extension (lfloor and lceil). As these are gcc extensions we
2701 do not need to worry about setting errno to EDOM.
2702 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2703 EXP is the expression that is a call to the builtin function;
2704 if convenient, the result should be placed in TARGET. */
2706 static rtx
2707 expand_builtin_int_roundingfn (tree exp, rtx target)
2709 convert_optab builtin_optab;
2710 rtx op0, tmp;
2711 rtx_insn *insns;
2712 tree fndecl = get_callee_fndecl (exp);
2713 enum built_in_function fallback_fn;
2714 tree fallback_fndecl;
2715 machine_mode mode;
2716 tree arg;
2718 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2719 gcc_unreachable ();
2721 arg = CALL_EXPR_ARG (exp, 0);
2723 switch (DECL_FUNCTION_CODE (fndecl))
2725 CASE_FLT_FN (BUILT_IN_ICEIL):
2726 CASE_FLT_FN (BUILT_IN_LCEIL):
2727 CASE_FLT_FN (BUILT_IN_LLCEIL):
2728 builtin_optab = lceil_optab;
2729 fallback_fn = BUILT_IN_CEIL;
2730 break;
2732 CASE_FLT_FN (BUILT_IN_IFLOOR):
2733 CASE_FLT_FN (BUILT_IN_LFLOOR):
2734 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2735 builtin_optab = lfloor_optab;
2736 fallback_fn = BUILT_IN_FLOOR;
2737 break;
2739 default:
2740 gcc_unreachable ();
2743 /* Make a suitable register to place result in. */
2744 mode = TYPE_MODE (TREE_TYPE (exp));
2746 target = gen_reg_rtx (mode);
2748 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2749 need to expand the argument again. This way, we will not perform
2750 side-effects more than once. */
2751 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2753 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2755 start_sequence ();
2757 /* Compute into TARGET. */
2758 if (expand_sfix_optab (target, op0, builtin_optab))
2760 /* Output the entire sequence. */
2761 insns = get_insns ();
2762 end_sequence ();
2763 emit_insn (insns);
2764 return target;
2767 /* If we were unable to expand via the builtin, stop the sequence
2768 (without outputting the insns). */
2769 end_sequence ();
2771 /* Fall back to floating point rounding optab. */
2772 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2774 /* For non-C99 targets we may end up without a fallback fndecl here
2775 if the user called __builtin_lfloor directly. In this case emit
2776 a call to the floor/ceil variants nevertheless. This should result
2777 in the best user experience on targets without full C99 support. */
2778 if (fallback_fndecl == NULL_TREE)
2780 tree fntype;
2781 const char *name = NULL;
2783 switch (DECL_FUNCTION_CODE (fndecl))
2785 case BUILT_IN_ICEIL:
2786 case BUILT_IN_LCEIL:
2787 case BUILT_IN_LLCEIL:
2788 name = "ceil";
2789 break;
2790 case BUILT_IN_ICEILF:
2791 case BUILT_IN_LCEILF:
2792 case BUILT_IN_LLCEILF:
2793 name = "ceilf";
2794 break;
2795 case BUILT_IN_ICEILL:
2796 case BUILT_IN_LCEILL:
2797 case BUILT_IN_LLCEILL:
2798 name = "ceill";
2799 break;
2800 case BUILT_IN_IFLOOR:
2801 case BUILT_IN_LFLOOR:
2802 case BUILT_IN_LLFLOOR:
2803 name = "floor";
2804 break;
2805 case BUILT_IN_IFLOORF:
2806 case BUILT_IN_LFLOORF:
2807 case BUILT_IN_LLFLOORF:
2808 name = "floorf";
2809 break;
2810 case BUILT_IN_IFLOORL:
2811 case BUILT_IN_LFLOORL:
2812 case BUILT_IN_LLFLOORL:
2813 name = "floorl";
2814 break;
2815 default:
2816 gcc_unreachable ();
2819 fntype = build_function_type_list (TREE_TYPE (arg),
2820 TREE_TYPE (arg), NULL_TREE);
2821 fallback_fndecl = build_fn_decl (name, fntype);
2824 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2826 tmp = expand_normal (exp);
2827 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2829 /* Truncate the result of floating point optab to integer
2830 via expand_fix (). */
2831 target = gen_reg_rtx (mode);
2832 expand_fix (target, tmp, 0);
2834 return target;
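/* Illustrative fallback (a sketch, not in the source): when no lceil
   or lfloor optab exists, long l = __builtin_lfloor (x); is expanded
   roughly as

     long l = (long) floor (x);

   i.e. a call to the floor variant followed by expand_fix.  */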
2837 /* Expand a call to one of the builtin math functions doing integer
2838 conversion (lrint).
2839 Return 0 if a normal call should be emitted rather than expanding the
2840 function in-line. EXP is the expression that is a call to the builtin
2841 function; if convenient, the result should be placed in TARGET. */
2843 static rtx
2844 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2846 convert_optab builtin_optab;
2847 rtx op0;
2848 rtx_insn *insns;
2849 tree fndecl = get_callee_fndecl (exp);
2850 tree arg;
2851 machine_mode mode;
2852 enum built_in_function fallback_fn = BUILT_IN_NONE;
2854 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2855 gcc_unreachable ();
2857 arg = CALL_EXPR_ARG (exp, 0);
2859 switch (DECL_FUNCTION_CODE (fndecl))
2861 CASE_FLT_FN (BUILT_IN_IRINT):
2862 fallback_fn = BUILT_IN_LRINT;
2863 /* FALLTHRU */
2864 CASE_FLT_FN (BUILT_IN_LRINT):
2865 CASE_FLT_FN (BUILT_IN_LLRINT):
2866 builtin_optab = lrint_optab;
2867 break;
2869 CASE_FLT_FN (BUILT_IN_IROUND):
2870 fallback_fn = BUILT_IN_LROUND;
2871 /* FALLTHRU */
2872 CASE_FLT_FN (BUILT_IN_LROUND):
2873 CASE_FLT_FN (BUILT_IN_LLROUND):
2874 builtin_optab = lround_optab;
2875 break;
2877 default:
2878 gcc_unreachable ();
2881 /* There's no easy way to detect the case we need to set EDOM. */
2882 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2883 return NULL_RTX;
2885 /* Make a suitable register to place result in. */
2886 mode = TYPE_MODE (TREE_TYPE (exp));
2888 /* There's no easy way to detect the case we need to set EDOM. */
2889 if (!flag_errno_math)
2891 rtx result = gen_reg_rtx (mode);
2893 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2894 need to expand the argument again. This way, we will not perform
2895 side-effects more than once. */
2896 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2898 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2900 start_sequence ();
2902 if (expand_sfix_optab (result, op0, builtin_optab))
2904 /* Output the entire sequence. */
2905 insns = get_insns ();
2906 end_sequence ();
2907 emit_insn (insns);
2908 return result;
2911 /* If we were unable to expand via the builtin, stop the sequence
2912 (without outputting the insns) and call to the library function
2913 with the stabilized argument list. */
2914 end_sequence ();
2917 if (fallback_fn != BUILT_IN_NONE)
2919 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2920 targets, (int) round (x) should never be transformed into
2921 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2922 a call to lround in the hope that the target provides at least some
2923 C99 functions. This should result in the best user experience on
2924 targets without full C99 support. */
2925 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2926 fallback_fn, 0);
2928 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2929 fallback_fndecl, 1, arg);
2931 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2932 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2933 return convert_to_mode (mode, target, 0);
2936 return expand_call (exp, target, target == const0_rtx);
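/* Similarly, a sketch: without an lround optab, int i =
   __builtin_iround (x); falls back to

     int i = (int) lround (x);

   using the explicit lround declaration obtained above.  */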
2939 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2940 a normal call should be emitted rather than expanding the function
2941 in-line. EXP is the expression that is a call to the builtin
2942 function; if convenient, the result should be placed in TARGET. */
2944 static rtx
2945 expand_builtin_powi (tree exp, rtx target)
2947 tree arg0, arg1;
2948 rtx op0, op1;
2949 machine_mode mode;
2950 machine_mode mode2;
2952 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2953 return NULL_RTX;
2955 arg0 = CALL_EXPR_ARG (exp, 0);
2956 arg1 = CALL_EXPR_ARG (exp, 1);
2957 mode = TYPE_MODE (TREE_TYPE (exp));
2959 /* Emit a libcall to libgcc. */
2961 /* Mode of the 2nd argument must match that of an int. */
2962 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2964 if (target == NULL_RTX)
2965 target = gen_reg_rtx (mode);
2967 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2968 if (GET_MODE (op0) != mode)
2969 op0 = convert_to_mode (mode, op0, 0);
2970 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2971 if (GET_MODE (op1) != mode2)
2972 op1 = convert_to_mode (mode2, op1, 0);
2974 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2975 target, LCT_CONST, mode, 2,
2976 op0, mode, op1, mode2);
2978 return target;
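/* Sketch of the resulting libcall, assuming the usual libgcc naming:
   for double, __builtin_powi (x, n) becomes the equivalent of

     double r = __powidf2 (x, n);

   with the exponent N passed in the int-sized mode computed above.  */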
2981 /* Expand expression EXP which is a call to the strlen builtin. Return
2982 NULL_RTX if we failed, in which case the caller should emit a normal call; otherwise
2983 try to get the result in TARGET, if convenient. */
2985 static rtx
2986 expand_builtin_strlen (tree exp, rtx target,
2987 machine_mode target_mode)
2989 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2990 return NULL_RTX;
2991 else
2993 struct expand_operand ops[4];
2994 rtx pat;
2995 tree len;
2996 tree src = CALL_EXPR_ARG (exp, 0);
2997 rtx src_reg;
2998 rtx_insn *before_strlen;
2999 machine_mode insn_mode = target_mode;
3000 enum insn_code icode = CODE_FOR_nothing;
3001 unsigned int align;
3003 /* If the length can be computed at compile-time, return it. */
3004 len = c_strlen (src, 0);
3005 if (len)
3006 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3008 /* If the length can be computed at compile-time and is a constant
3009 integer, but there are side-effects in src, evaluate
3010 src for side-effects, then return len.
3011 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3012 can be optimized into: i++; x = 3; */
3013 len = c_strlen (src, 1);
3014 if (len && TREE_CODE (len) == INTEGER_CST)
3016 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3017 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3020 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3022 /* If SRC is not a pointer type, don't do this operation inline. */
3023 if (align == 0)
3024 return NULL_RTX;
3026 /* Bail out if we can't compute strlen in the right mode. */
3027 while (insn_mode != VOIDmode)
3029 icode = optab_handler (strlen_optab, insn_mode);
3030 if (icode != CODE_FOR_nothing)
3031 break;
3033 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3035 if (insn_mode == VOIDmode)
3036 return NULL_RTX;
3038 /* Make a place to hold the source address. We will not expand
3039 the actual source until we are sure that the expansion will
3040 not fail -- there are trees that cannot be expanded twice. */
3041 src_reg = gen_reg_rtx (Pmode);
3043 /* Mark the beginning of the strlen sequence so we can emit the
3044 source operand later. */
3045 before_strlen = get_last_insn ();
3047 create_output_operand (&ops[0], target, insn_mode);
3048 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3049 create_integer_operand (&ops[2], 0);
3050 create_integer_operand (&ops[3], align);
3051 if (!maybe_expand_insn (icode, 4, ops))
3052 return NULL_RTX;
3054 /* Now that we are assured of success, expand the source. */
3055 start_sequence ();
3056 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3057 if (pat != src_reg)
3059 #ifdef POINTERS_EXTEND_UNSIGNED
3060 if (GET_MODE (pat) != Pmode)
3061 pat = convert_to_mode (Pmode, pat,
3062 POINTERS_EXTEND_UNSIGNED);
3063 #endif
3064 emit_move_insn (src_reg, pat);
3066 pat = get_insns ();
3067 end_sequence ();
3069 if (before_strlen)
3070 emit_insn_after (pat, before_strlen);
3071 else
3072 emit_insn_before (pat, get_insns ());
3074 /* Return the value in the proper mode for this function. */
3075 if (GET_MODE (ops[0].value) == target_mode)
3076 target = ops[0].value;
3077 else if (target != 0)
3078 convert_move (target, ops[0].value, 0);
3079 else
3080 target = convert_to_mode (target_mode, ops[0].value, 0);
3082 return target;
3086 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3087 bytes from constant string DATA + OFFSET and return it as target
3088 constant. */
3090 static rtx
3091 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3092 machine_mode mode)
3094 const char *str = (const char *) data;
3096 gcc_assert (offset >= 0
3097 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3098 <= strlen (str) + 1));
3100 return c_readstr (str + offset, mode);
3103 /* LEN specifies the length of the block for the memcpy/memset operation.
3104 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3105 In some cases we can make a very likely guess on the max size, which we
3106 then set into PROBABLE_MAX_SIZE. */
3108 static void
3109 determine_block_size (tree len, rtx len_rtx,
3110 unsigned HOST_WIDE_INT *min_size,
3111 unsigned HOST_WIDE_INT *max_size,
3112 unsigned HOST_WIDE_INT *probable_max_size)
3114 if (CONST_INT_P (len_rtx))
3116 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3117 return;
3119 else
3121 wide_int min, max;
3122 enum value_range_type range_type = VR_UNDEFINED;
3124 /* Determine bounds from the type. */
3125 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3126 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3127 else
3128 *min_size = 0;
3129 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3130 *probable_max_size = *max_size
3131 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3132 else
3133 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3135 if (TREE_CODE (len) == SSA_NAME)
3136 range_type = get_range_info (len, &min, &max);
3137 if (range_type == VR_RANGE)
3139 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3140 *min_size = min.to_uhwi ();
3141 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3142 *probable_max_size = *max_size = max.to_uhwi ();
3144 else if (range_type == VR_ANTI_RANGE)
3146 /* An anti-range 0...N lets us determine the minimal size as N+1. */
3147 if (min == 0)
3149 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3150 *min_size = max.to_uhwi () + 1;
3152 /* Code like
3154 int n;
3155 if (n < 100)
3156 memcpy (a, b, n)
3158 produces an anti-range allowing negative values of N. We can
3159 still use that information and guess that N is not negative. */
3161 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3162 *probable_max_size = min.to_uhwi () - 1;
3165 gcc_checking_assert (*max_size <=
3166 (unsigned HOST_WIDE_INT)
3167 GET_MODE_MASK (GET_MODE (len_rtx)));
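/* Example (illustrative): if LEN is an SSA name with the recorded
   range [4, 16], the code above yields

     *min_size = 4;  *max_size = *probable_max_size = 16;

   while a constant LEN pins all three to that single value.  */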
3170 /* Helper function to do the actual work for expand_builtin_memcpy. */
3172 static rtx
3173 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3175 const char *src_str;
3176 unsigned int src_align = get_pointer_alignment (src);
3177 unsigned int dest_align = get_pointer_alignment (dest);
3178 rtx dest_mem, src_mem, dest_addr, len_rtx;
3179 HOST_WIDE_INT expected_size = -1;
3180 unsigned int expected_align = 0;
3181 unsigned HOST_WIDE_INT min_size;
3182 unsigned HOST_WIDE_INT max_size;
3183 unsigned HOST_WIDE_INT probable_max_size;
3185 /* If DEST is not a pointer type, call the normal function. */
3186 if (dest_align == 0)
3187 return NULL_RTX;
3189 /* If SRC is not a pointer type, don't do this
3190 operation in-line. */
3191 if (src_align == 0)
3192 return NULL_RTX;
3194 if (currently_expanding_gimple_stmt)
3195 stringop_block_profile (currently_expanding_gimple_stmt,
3196 &expected_align, &expected_size);
3198 if (expected_align < dest_align)
3199 expected_align = dest_align;
3200 dest_mem = get_memory_rtx (dest, len);
3201 set_mem_align (dest_mem, dest_align);
3202 len_rtx = expand_normal (len);
3203 determine_block_size (len, len_rtx, &min_size, &max_size,
3204 &probable_max_size);
3205 src_str = c_getstr (src);
3207 /* If SRC is a string constant and block move would be done
3208 by pieces, we can avoid loading the string from memory
3209 and only store the computed constants. */
3210 if (src_str
3211 && CONST_INT_P (len_rtx)
3212 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3213 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3214 CONST_CAST (char *, src_str),
3215 dest_align, false))
3217 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3218 builtin_memcpy_read_str,
3219 CONST_CAST (char *, src_str),
3220 dest_align, false, 0);
3221 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3222 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3223 return dest_mem;
3226 src_mem = get_memory_rtx (src, len);
3227 set_mem_align (src_mem, src_align);
3229 /* Copy word part most expediently. */
3230 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3231 CALL_EXPR_TAILCALL (exp)
3232 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3233 expected_align, expected_size,
3234 min_size, max_size, probable_max_size);
3236 if (dest_addr == 0)
3238 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3239 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3242 return dest_addr;
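/* Illustrative case: for memcpy (buf, "hi", 3) with suitably aligned
   BUF, the string is never loaded from memory; store_by_pieces emits
   the equivalent of storing the constant bytes 'h', 'i', '\0'
   directly into the destination.  */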
3245 /* Expand a call EXP to the memcpy builtin.
3246 Return NULL_RTX if we failed, in which case the caller should emit a
3247 normal call; otherwise try to get the result in TARGET, if convenient (and in
3248 mode MODE if that's convenient). */
3250 static rtx
3251 expand_builtin_memcpy (tree exp, rtx target)
3253 if (!validate_arglist (exp,
3254 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3255 return NULL_RTX;
3256 else
3258 tree dest = CALL_EXPR_ARG (exp, 0);
3259 tree src = CALL_EXPR_ARG (exp, 1);
3260 tree len = CALL_EXPR_ARG (exp, 2);
3261 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3265 /* Expand an instrumented call EXP to the memcpy builtin.
3266 Return NULL_RTX if we failed, in which case the caller should emit a
3267 normal call; otherwise try to get the result in TARGET, if convenient (and in
3268 mode MODE if that's convenient). */
3270 static rtx
3271 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3273 if (!validate_arglist (exp,
3274 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3275 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3276 INTEGER_TYPE, VOID_TYPE))
3277 return NULL_RTX;
3278 else
3280 tree dest = CALL_EXPR_ARG (exp, 0);
3281 tree src = CALL_EXPR_ARG (exp, 2);
3282 tree len = CALL_EXPR_ARG (exp, 4);
3283 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3285 /* Return src bounds with the result. */
3286 if (res)
3288 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3289 expand_normal (CALL_EXPR_ARG (exp, 1)));
3290 res = chkp_join_splitted_slot (res, bnd);
3292 return res;
3296 /* Expand a call EXP to the mempcpy builtin.
3297 Return NULL_RTX if we failed; the caller should emit a normal call,
3298 otherwise try to get the result in TARGET, if convenient (and in
3299 mode MODE if that's convenient). If ENDP is 0 return the
3300 destination pointer, if ENDP is 1 return the end pointer ala
3301 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3302 stpcpy. */
3304 static rtx
3305 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3307 if (!validate_arglist (exp,
3308 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3309 return NULL_RTX;
3310 else
3312 tree dest = CALL_EXPR_ARG (exp, 0);
3313 tree src = CALL_EXPR_ARG (exp, 1);
3314 tree len = CALL_EXPR_ARG (exp, 2);
3315 return expand_builtin_mempcpy_args (dest, src, len,
3316 target, mode, /*endp=*/ 1,
3317 exp);
3321 /* Expand an instrumented call EXP to the mempcpy builtin.
3322 Return NULL_RTX if we failed, in which case the caller should emit a
3323 normal call; otherwise try to get the result in TARGET, if convenient (and in
3324 mode MODE if that's convenient). */
3326 static rtx
3327 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3329 if (!validate_arglist (exp,
3330 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3331 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3332 INTEGER_TYPE, VOID_TYPE))
3333 return NULL_RTX;
3334 else
3336 tree dest = CALL_EXPR_ARG (exp, 0);
3337 tree src = CALL_EXPR_ARG (exp, 2);
3338 tree len = CALL_EXPR_ARG (exp, 4);
3339 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3340 mode, 1, exp);
3342 /* Return src bounds with the result. */
3343 if (res)
3345 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3346 expand_normal (CALL_EXPR_ARG (exp, 1)));
3347 res = chkp_join_splitted_slot (res, bnd);
3349 return res;
3353 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3354 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3355 so that this can also be called without constructing an actual CALL_EXPR.
3356 The other arguments and return value are the same as for
3357 expand_builtin_mempcpy. */
3359 static rtx
3360 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3361 rtx target, machine_mode mode, int endp,
3362 tree orig_exp)
3364 tree fndecl = get_callee_fndecl (orig_exp);
3366 /* If return value is ignored, transform mempcpy into memcpy. */
3367 if (target == const0_rtx
3368 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3369 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3371 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3372 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3373 dest, src, len);
3374 return expand_expr (result, target, mode, EXPAND_NORMAL);
3376 else if (target == const0_rtx
3377 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3379 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3380 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3381 dest, src, len);
3382 return expand_expr (result, target, mode, EXPAND_NORMAL);
3384 else
3386 const char *src_str;
3387 unsigned int src_align = get_pointer_alignment (src);
3388 unsigned int dest_align = get_pointer_alignment (dest);
3389 rtx dest_mem, src_mem, len_rtx;
3391 /* If either SRC or DEST is not a pointer type, don't do this
3392 operation in-line. */
3393 if (dest_align == 0 || src_align == 0)
3394 return NULL_RTX;
3396 /* If LEN is not constant, call the normal function. */
3397 if (! tree_fits_uhwi_p (len))
3398 return NULL_RTX;
3400 len_rtx = expand_normal (len);
3401 src_str = c_getstr (src);
3403 /* If SRC is a string constant and block move would be done
3404 by pieces, we can avoid loading the string from memory
3405 and only store the computed constants. */
3406 if (src_str
3407 && CONST_INT_P (len_rtx)
3408 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3409 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3410 CONST_CAST (char *, src_str),
3411 dest_align, false))
3413 dest_mem = get_memory_rtx (dest, len);
3414 set_mem_align (dest_mem, dest_align);
3415 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3416 builtin_memcpy_read_str,
3417 CONST_CAST (char *, src_str),
3418 dest_align, false, endp);
3419 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3420 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3421 return dest_mem;
3424 if (CONST_INT_P (len_rtx)
3425 && can_move_by_pieces (INTVAL (len_rtx),
3426 MIN (dest_align, src_align)))
3428 dest_mem = get_memory_rtx (dest, len);
3429 set_mem_align (dest_mem, dest_align);
3430 src_mem = get_memory_rtx (src, len);
3431 set_mem_align (src_mem, src_align);
3432 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3433 MIN (dest_align, src_align), endp);
3434 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3435 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3436 return dest_mem;
3439 return NULL_RTX;
3443 #ifndef HAVE_movstr
3444 # define HAVE_movstr 0
3445 # define CODE_FOR_movstr CODE_FOR_nothing
3446 #endif
3448 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3449 we failed, in which case the caller should emit a normal call; otherwise try to
3450 get the result in TARGET, if convenient. If ENDP is 0 return the
3451 destination pointer, if ENDP is 1 return the end pointer ala
3452 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3453 stpcpy. */
3455 static rtx
3456 expand_movstr (tree dest, tree src, rtx target, int endp)
3458 struct expand_operand ops[3];
3459 rtx dest_mem;
3460 rtx src_mem;
3462 if (!HAVE_movstr)
3463 return NULL_RTX;
3465 dest_mem = get_memory_rtx (dest, NULL);
3466 src_mem = get_memory_rtx (src, NULL);
3467 if (!endp)
3469 target = force_reg (Pmode, XEXP (dest_mem, 0));
3470 dest_mem = replace_equiv_address (dest_mem, target);
3473 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3474 create_fixed_operand (&ops[1], dest_mem);
3475 create_fixed_operand (&ops[2], src_mem);
3476 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3477 return NULL_RTX;
3479 if (endp && target != const0_rtx)
3481 target = ops[0].value;
3482 /* movstr is supposed to set end to the address of the NUL
3483 terminator. If the caller requested a mempcpy-like return value,
3484 adjust it. */
3485 if (endp == 1)
3487 rtx tem = plus_constant (GET_MODE (target),
3488 gen_lowpart (GET_MODE (target), target), 1);
3489 emit_move_insn (target, force_operand (tem, NULL_RTX));
3492 return target;
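/* Illustrative sketch (editorial, not part of builtins.c): the three
   ENDP return conventions documented above, spelled out with a plain
   byte-wise string copy.  The helper name is hypothetical.  */
static char *copy_with_endp (char *dest, const char *src, int endp)
{
  char *d = dest;
  while ((*d = *src++) != '\0')
    d++;
  /* D now points at the NUL terminator that was just stored.  */
  if (endp == 0)
    return dest;      /* strcpy-like: the destination pointer.  */
  else if (endp == 1)
    return d + 1;     /* mempcpy-like: one past the last byte copied.  */
  else
    return d;         /* endp == 2, stpcpy-like: address of the NUL.  */
}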
3495 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3496 NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
3497 try to get the result in TARGET, if convenient (and in mode MODE if that's
3498 convenient). */
3500 static rtx
3501 expand_builtin_strcpy (tree exp, rtx target)
3503 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3505 tree dest = CALL_EXPR_ARG (exp, 0);
3506 tree src = CALL_EXPR_ARG (exp, 1);
3507 return expand_builtin_strcpy_args (dest, src, target);
3509 return NULL_RTX;
3512 /* Helper function to do the actual work for expand_builtin_strcpy. The
3513 arguments to the builtin_strcpy call DEST and SRC are broken out
3514 so that this can also be called without constructing an actual CALL_EXPR.
3515 The other arguments and return value are the same as for
3516 expand_builtin_strcpy. */
3518 static rtx
3519 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3521 return expand_movstr (dest, src, target, /*endp=*/0);
3524 /* Expand a call EXP to the stpcpy builtin.
3525 Return NULL_RTX if we failed; the caller should then emit a normal call;
3526 otherwise try to get the result in TARGET, if convenient (and in
3527 mode MODE if that's convenient). */
3529 static rtx
3530 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3532 tree dst, src;
3533 location_t loc = EXPR_LOCATION (exp);
3535 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3536 return NULL_RTX;
3538 dst = CALL_EXPR_ARG (exp, 0);
3539 src = CALL_EXPR_ARG (exp, 1);
3541 /* If the return value is ignored, transform stpcpy into strcpy. */
3542 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3544 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3545 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3546 return expand_expr (result, target, mode, EXPAND_NORMAL);
3548 else
3550 tree len, lenp1;
3551 rtx ret;
3553 /* Ensure we get an actual string whose length can be evaluated at
3554 compile-time, not an expression containing a string. This is
3555 because the latter will potentially produce pessimized code
3556 when used to produce the return value. */
3557 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3558 return expand_movstr (dst, src, target, /*endp=*/2);
3560 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3561 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3562 target, mode, /*endp=*/2,
3563 exp);
3565 if (ret)
3566 return ret;
3568 if (TREE_CODE (len) == INTEGER_CST)
3570 rtx len_rtx = expand_normal (len);
3572 if (CONST_INT_P (len_rtx))
3574 ret = expand_builtin_strcpy_args (dst, src, target);
3576 if (ret)
3578 if (! target)
3580 if (mode != VOIDmode)
3581 target = gen_reg_rtx (mode);
3582 else
3583 target = gen_reg_rtx (GET_MODE (ret));
3585 if (GET_MODE (target) != GET_MODE (ret))
3586 ret = gen_lowpart (GET_MODE (target), ret);
3588 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3589 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3590 gcc_assert (ret);
3592 return target;
3597 return expand_movstr (dst, src, target, /*endp=*/2);
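/* Illustrative sketch (editorial, not part of builtins.c): when SRC is
   a string constant of known length LEN, the expansion above relies on
   stpcpy (d, s) copying LEN + 1 bytes and returning the address of the
   stored NUL, i.e. the mempcpy call with endp == 2.  Helper name is
   hypothetical.  */
#include <string.h>

static char *stpcpy_sketch (char *d, const char *s)
{
  size_t len = strlen (s);   /* LEN, known at compile time above */
  memcpy (d, s, len + 1);    /* mempcpy (d, s, LEN + 1) with endp == 2 */
  return d + len;            /* address of the stored NUL */
}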
3601 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3602 bytes from constant string DATA + OFFSET and return it as target
3603 constant. */
3605 static rtx
3606 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3607 machine_mode mode)
3609 const char *str = (const char *) data;
3611 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3612 return const0_rtx;
3614 return c_readstr (str + offset, mode);
3617 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3618 NULL_RTX if we failed; the caller should then emit a normal call. */
3620 static rtx
3621 expand_builtin_strncpy (tree exp, rtx target)
3623 location_t loc = EXPR_LOCATION (exp);
3625 if (validate_arglist (exp,
3626 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3628 tree dest = CALL_EXPR_ARG (exp, 0);
3629 tree src = CALL_EXPR_ARG (exp, 1);
3630 tree len = CALL_EXPR_ARG (exp, 2);
3631 tree slen = c_strlen (src, 1);
3633 /* We must be passed a constant len and src parameter. */
3634 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3635 return NULL_RTX;
3637 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3639 /* We're required to pad with trailing zeros if the requested
3640 len is greater than strlen(s2)+1. In that case try to
3641 use store_by_pieces; if that fails, punt. */
3642 if (tree_int_cst_lt (slen, len))
3644 unsigned int dest_align = get_pointer_alignment (dest);
3645 const char *p = c_getstr (src);
3646 rtx dest_mem;
3648 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3649 || !can_store_by_pieces (tree_to_uhwi (len),
3650 builtin_strncpy_read_str,
3651 CONST_CAST (char *, p),
3652 dest_align, false))
3653 return NULL_RTX;
3655 dest_mem = get_memory_rtx (dest, len);
3656 store_by_pieces (dest_mem, tree_to_uhwi (len),
3657 builtin_strncpy_read_str,
3658 CONST_CAST (char *, p), dest_align, false, 0);
3659 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3660 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3661 return dest_mem;
3664 return NULL_RTX;
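/* Illustrative sketch (editorial, not part of builtins.c): the
   zero-padding rule handled above.  When LEN exceeds strlen (SRC) + 1,
   strncpy must fill the rest of the destination with NUL bytes, which
   is why the expansion can use store_by_pieces over the full LEN.  */
#include <string.h>

static void strncpy_pad_example (char *dst)
{
  strncpy (dst, "ab", 8);   /* stores 'a', 'b', then six '\0' bytes */
}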
3667 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3668 bytes from constant string DATA + OFFSET and return it as target
3669 constant. */
3671 static rtx
3672 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3673 machine_mode mode)
3675 const char *c = (const char *) data;
3676 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3678 memset (p, *c, GET_MODE_SIZE (mode));
3680 return c_readstr (p, mode);
3683 /* Callback routine for store_by_pieces. Return the RTL of a register
3684 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3685 char value given in the RTL register data. For example, if mode is
3686 4 bytes wide, return the RTL for 0x01010101*data. */
3688 static rtx
3689 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3690 machine_mode mode)
3692 rtx target, coeff;
3693 size_t size;
3694 char *p;
3696 size = GET_MODE_SIZE (mode);
3697 if (size == 1)
3698 return (rtx) data;
3700 p = XALLOCAVEC (char, size);
3701 memset (p, 1, size);
3702 coeff = c_readstr (p, mode);
3704 target = convert_to_mode (mode, (rtx) data, 1);
3705 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3706 return force_reg (mode, target);
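/* Illustrative sketch (editorial, not part of builtins.c): the
   multiplication trick used above, shown for a 4-byte mode.
   Multiplying the byte value by a 0x01010101 coefficient replicates
   it into every byte of the word.  */
#include <stdint.h>

static uint32_t replicate_byte_32 (uint8_t c)
{
  return (uint32_t) c * 0x01010101u;   /* e.g. 0xAB -> 0xABABABAB */
}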
3709 /* Expand expression EXP, which is a call to the memset builtin. Return
3710 NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
3711 try to get the result in TARGET, if convenient (and in mode MODE if that's
3712 convenient). */
3714 static rtx
3715 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3717 if (!validate_arglist (exp,
3718 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3719 return NULL_RTX;
3720 else
3722 tree dest = CALL_EXPR_ARG (exp, 0);
3723 tree val = CALL_EXPR_ARG (exp, 1);
3724 tree len = CALL_EXPR_ARG (exp, 2);
3725 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3729 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3730 Return NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
3731 try to get the result in TARGET, if convenient (and in mode MODE if that's
3732 convenient). */
3734 static rtx
3735 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3737 if (!validate_arglist (exp,
3738 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3739 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3740 return NULL_RTX;
3741 else
3743 tree dest = CALL_EXPR_ARG (exp, 0);
3744 tree val = CALL_EXPR_ARG (exp, 2);
3745 tree len = CALL_EXPR_ARG (exp, 3);
3746 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3748 /* Return src bounds with the result. */
3749 if (res)
3751 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3752 expand_normal (CALL_EXPR_ARG (exp, 1)));
3753 res = chkp_join_splitted_slot (res, bnd);
3755 return res;
3759 /* Helper function to do the actual work for expand_builtin_memset. The
3760 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3761 so that this can also be called without constructing an actual CALL_EXPR.
3762 The other arguments and return value are the same as for
3763 expand_builtin_memset. */
3765 static rtx
3766 expand_builtin_memset_args (tree dest, tree val, tree len,
3767 rtx target, machine_mode mode, tree orig_exp)
3769 tree fndecl, fn;
3770 enum built_in_function fcode;
3771 machine_mode val_mode;
3772 char c;
3773 unsigned int dest_align;
3774 rtx dest_mem, dest_addr, len_rtx;
3775 HOST_WIDE_INT expected_size = -1;
3776 unsigned int expected_align = 0;
3777 unsigned HOST_WIDE_INT min_size;
3778 unsigned HOST_WIDE_INT max_size;
3779 unsigned HOST_WIDE_INT probable_max_size;
3781 dest_align = get_pointer_alignment (dest);
3783 /* If DEST is not a pointer type, don't do this operation in-line. */
3784 if (dest_align == 0)
3785 return NULL_RTX;
3787 if (currently_expanding_gimple_stmt)
3788 stringop_block_profile (currently_expanding_gimple_stmt,
3789 &expected_align, &expected_size);
3791 if (expected_align < dest_align)
3792 expected_align = dest_align;
3794 /* If the LEN parameter is zero, return DEST. */
3795 if (integer_zerop (len))
3797 /* Evaluate and ignore VAL in case it has side-effects. */
3798 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3799 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3802 /* Stabilize the arguments in case we fail. */
3803 dest = builtin_save_expr (dest);
3804 val = builtin_save_expr (val);
3805 len = builtin_save_expr (len);
3807 len_rtx = expand_normal (len);
3808 determine_block_size (len, len_rtx, &min_size, &max_size,
3809 &probable_max_size);
3810 dest_mem = get_memory_rtx (dest, len);
3811 val_mode = TYPE_MODE (unsigned_char_type_node);
3813 if (TREE_CODE (val) != INTEGER_CST)
3815 rtx val_rtx;
3817 val_rtx = expand_normal (val);
3818 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3820 /* Assume that we can memset by pieces if we can store
3821 the coefficients by pieces (in the required modes).
3822 We can't pass builtin_memset_gen_str as that emits RTL. */
3823 c = 1;
3824 if (tree_fits_uhwi_p (len)
3825 && can_store_by_pieces (tree_to_uhwi (len),
3826 builtin_memset_read_str, &c, dest_align,
3827 true))
3829 val_rtx = force_reg (val_mode, val_rtx);
3830 store_by_pieces (dest_mem, tree_to_uhwi (len),
3831 builtin_memset_gen_str, val_rtx, dest_align,
3832 true, 0);
3834 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3835 dest_align, expected_align,
3836 expected_size, min_size, max_size,
3837 probable_max_size))
3838 goto do_libcall;
3840 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3841 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3842 return dest_mem;
3845 if (target_char_cast (val, &c))
3846 goto do_libcall;
3848 if (c)
3850 if (tree_fits_uhwi_p (len)
3851 && can_store_by_pieces (tree_to_uhwi (len),
3852 builtin_memset_read_str, &c, dest_align,
3853 true))
3854 store_by_pieces (dest_mem, tree_to_uhwi (len),
3855 builtin_memset_read_str, &c, dest_align, true, 0);
3856 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3857 gen_int_mode (c, val_mode),
3858 dest_align, expected_align,
3859 expected_size, min_size, max_size,
3860 probable_max_size))
3861 goto do_libcall;
3863 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3864 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3865 return dest_mem;
3868 set_mem_align (dest_mem, dest_align);
3869 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3870 CALL_EXPR_TAILCALL (orig_exp)
3871 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3872 expected_align, expected_size,
3873 min_size, max_size,
3874 probable_max_size);
3876 if (dest_addr == 0)
3878 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3879 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3882 return dest_addr;
3884 do_libcall:
3885 fndecl = get_callee_fndecl (orig_exp);
3886 fcode = DECL_FUNCTION_CODE (fndecl);
3887 if (fcode == BUILT_IN_MEMSET
3888 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3889 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3890 dest, val, len);
3891 else if (fcode == BUILT_IN_BZERO)
3892 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3893 dest, len);
3894 else
3895 gcc_unreachable ();
3896 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3897 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3898 return expand_call (fn, target, target == const0_rtx);
3901 /* Expand expression EXP, which is a call to the bzero builtin. Return
3902 NULL_RTX if we failed; the caller should then emit a normal call. */
3904 static rtx
3905 expand_builtin_bzero (tree exp)
3907 tree dest, size;
3908 location_t loc = EXPR_LOCATION (exp);
3910 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3911 return NULL_RTX;
3913 dest = CALL_EXPR_ARG (exp, 0);
3914 size = CALL_EXPR_ARG (exp, 1);
3916 /* New argument list transforming bzero(ptr x, int y) to
3917 memset(ptr x, int 0, size_t y). This is done this way
3918 so that if it isn't expanded inline, we fall back to
3919 calling bzero instead of memset. */
3921 return expand_builtin_memset_args (dest, integer_zero_node,
3922 fold_convert_loc (loc,
3923 size_type_node, size),
3924 const0_rtx, VOIDmode, exp);
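/* Illustrative sketch (editorial, not part of builtins.c): the
   argument rewrite above is the source-level equivalent of the classic
   transformation below.  */
#include <string.h>

static void bzero_as_memset (void *p, size_t n)
{
  /* bzero (p, n); is expanded as:  */
  memset (p, 0, n);
}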
3927 /* Expand expression EXP, which is a call to the memcmp built-in function.
3928 Return NULL_RTX if we failed and the caller should emit a normal call,
3929 otherwise try to get the result in TARGET, if convenient (and in mode
3930 MODE, if that's convenient). */
3932 static rtx
3933 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3934 ATTRIBUTE_UNUSED machine_mode mode)
3936 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3938 if (!validate_arglist (exp,
3939 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3940 return NULL_RTX;
3942 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3943 implementing memcmp because it will stop if it encounters two
3944 zero bytes. */
3945 #if defined HAVE_cmpmemsi
3947 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3948 rtx result;
3949 rtx insn;
3950 tree arg1 = CALL_EXPR_ARG (exp, 0);
3951 tree arg2 = CALL_EXPR_ARG (exp, 1);
3952 tree len = CALL_EXPR_ARG (exp, 2);
3954 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3955 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3956 machine_mode insn_mode;
3958 if (HAVE_cmpmemsi)
3959 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3960 else
3961 return NULL_RTX;
3963 /* If we don't have POINTER_TYPE, call the function. */
3964 if (arg1_align == 0 || arg2_align == 0)
3965 return NULL_RTX;
3967 /* Make a place to write the result of the instruction. */
3968 result = target;
3969 if (! (result != 0
3970 && REG_P (result) && GET_MODE (result) == insn_mode
3971 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3972 result = gen_reg_rtx (insn_mode);
3974 arg1_rtx = get_memory_rtx (arg1, len);
3975 arg2_rtx = get_memory_rtx (arg2, len);
3976 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3978 /* Set MEM_SIZE as appropriate. */
3979 if (CONST_INT_P (arg3_rtx))
3981 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3982 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3985 if (HAVE_cmpmemsi)
3986 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3987 GEN_INT (MIN (arg1_align, arg2_align)));
3988 else
3989 gcc_unreachable ();
3991 if (insn)
3992 emit_insn (insn);
3993 else
3994 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3995 TYPE_MODE (integer_type_node), 3,
3996 XEXP (arg1_rtx, 0), Pmode,
3997 XEXP (arg2_rtx, 0), Pmode,
3998 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3999 TYPE_UNSIGNED (sizetype)),
4000 TYPE_MODE (sizetype));
4002 /* Return the value in the proper mode for this function. */
4003 mode = TYPE_MODE (TREE_TYPE (exp));
4004 if (GET_MODE (result) == mode)
4005 return result;
4006 else if (target != 0)
4008 convert_move (target, result, 0);
4009 return target;
4011 else
4012 return convert_to_mode (mode, result, 0);
4014 #endif /* HAVE_cmpmemsi. */
4016 return NULL_RTX;
4019 /* Expand expression EXP, which is a call to the strcmp builtin. Return
4020 NULL_RTX if we failed; the caller should then emit a normal call.
4021 Otherwise try to get the result in TARGET, if convenient. */
4023 static rtx
4024 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4026 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4027 return NULL_RTX;
4029 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4030 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4031 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4033 rtx arg1_rtx, arg2_rtx;
4034 rtx result, insn = NULL_RTX;
4035 tree fndecl, fn;
4036 tree arg1 = CALL_EXPR_ARG (exp, 0);
4037 tree arg2 = CALL_EXPR_ARG (exp, 1);
4039 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4040 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4042 /* If we don't have POINTER_TYPE, call the function. */
4043 if (arg1_align == 0 || arg2_align == 0)
4044 return NULL_RTX;
4046 /* Stabilize the arguments in case gen_cmpstr(n)si fails. */
4047 arg1 = builtin_save_expr (arg1);
4048 arg2 = builtin_save_expr (arg2);
4050 arg1_rtx = get_memory_rtx (arg1, NULL);
4051 arg2_rtx = get_memory_rtx (arg2, NULL);
4053 #ifdef HAVE_cmpstrsi
4054 /* Try to call cmpstrsi. */
4055 if (HAVE_cmpstrsi)
4057 machine_mode insn_mode
4058 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4060 /* Make a place to write the result of the instruction. */
4061 result = target;
4062 if (! (result != 0
4063 && REG_P (result) && GET_MODE (result) == insn_mode
4064 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4065 result = gen_reg_rtx (insn_mode);
4067 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4068 GEN_INT (MIN (arg1_align, arg2_align)));
4070 #endif
4071 #ifdef HAVE_cmpstrnsi
4072 /* Try to determine at least one length and call cmpstrnsi. */
4073 if (!insn && HAVE_cmpstrnsi)
4075 tree len;
4076 rtx arg3_rtx;
4078 machine_mode insn_mode
4079 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4080 tree len1 = c_strlen (arg1, 1);
4081 tree len2 = c_strlen (arg2, 1);
4083 if (len1)
4084 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4085 if (len2)
4086 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4088 /* If we don't have a constant length for the first, use the length
4089 of the second, if we know it. We don't require a constant for
4090 this case; some cost analysis could be done if both are available
4091 but neither is constant. For now, assume they're equally cheap,
4092 unless one has side effects. If both strings have constant lengths,
4093 use the smaller. */
4095 if (!len1)
4096 len = len2;
4097 else if (!len2)
4098 len = len1;
4099 else if (TREE_SIDE_EFFECTS (len1))
4100 len = len2;
4101 else if (TREE_SIDE_EFFECTS (len2))
4102 len = len1;
4103 else if (TREE_CODE (len1) != INTEGER_CST)
4104 len = len2;
4105 else if (TREE_CODE (len2) != INTEGER_CST)
4106 len = len1;
4107 else if (tree_int_cst_lt (len1, len2))
4108 len = len1;
4109 else
4110 len = len2;
4112 /* If both arguments have side effects, we cannot optimize. */
4113 if (!len || TREE_SIDE_EFFECTS (len))
4114 goto do_libcall;
4116 arg3_rtx = expand_normal (len);
4118 /* Make a place to write the result of the instruction. */
4119 result = target;
4120 if (! (result != 0
4121 && REG_P (result) && GET_MODE (result) == insn_mode
4122 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4123 result = gen_reg_rtx (insn_mode);
4125 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4126 GEN_INT (MIN (arg1_align, arg2_align)));
4128 #endif
4130 if (insn)
4132 machine_mode mode;
4133 emit_insn (insn);
4135 /* Return the value in the proper mode for this function. */
4136 mode = TYPE_MODE (TREE_TYPE (exp));
4137 if (GET_MODE (result) == mode)
4138 return result;
4139 if (target == 0)
4140 return convert_to_mode (mode, result, 0);
4141 convert_move (target, result, 0);
4142 return target;
4145 /* Expand the library call ourselves using a stabilized argument
4146 list to avoid re-evaluating the function's arguments twice. */
4147 #ifdef HAVE_cmpstrnsi
4148 do_libcall:
4149 #endif
4150 fndecl = get_callee_fndecl (exp);
4151 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4152 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4153 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4154 return expand_call (fn, target, target == const0_rtx);
4156 #endif
4157 return NULL_RTX;
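/* Illustrative sketch (editorial, not part of builtins.c): why the
   cmpstrnsi path above only needs to compare MIN of the two known
   lengths plus one -- the shorter string's NUL terminator decides the
   result.  The helper name is hypothetical; only the sign of the
   result matches strcmp, not its magnitude.  */
#include <string.h>

static int strcmp_bounded (const char *a, const char *b)
{
  size_t la = strlen (a), lb = strlen (b);
  size_t n = (la < lb ? la : lb) + 1;   /* include one NUL terminator */
  return memcmp (a, b, n);              /* same sign as strcmp (a, b) */
}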
4160 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4161 NULL_RTX if we failed; the caller should then emit a normal call.
4162 Otherwise try to get the result in TARGET, if convenient. */
4164 static rtx
4165 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4166 ATTRIBUTE_UNUSED machine_mode mode)
4168 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4170 if (!validate_arglist (exp,
4171 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4172 return NULL_RTX;
4174 /* If c_strlen can determine an expression for one of the string
4175 lengths, and it doesn't have side effects, then emit cmpstrnsi
4176 using length MIN(strlen(string)+1, arg3). */
4177 #ifdef HAVE_cmpstrnsi
4178 if (HAVE_cmpstrnsi)
4180 tree len, len1, len2;
4181 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4182 rtx result, insn;
4183 tree fndecl, fn;
4184 tree arg1 = CALL_EXPR_ARG (exp, 0);
4185 tree arg2 = CALL_EXPR_ARG (exp, 1);
4186 tree arg3 = CALL_EXPR_ARG (exp, 2);
4188 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4189 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4190 machine_mode insn_mode
4191 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4193 len1 = c_strlen (arg1, 1);
4194 len2 = c_strlen (arg2, 1);
4196 if (len1)
4197 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4198 if (len2)
4199 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4201 /* If we don't have a constant length for the first, use the length
4202 of the second, if we know it. We don't require a constant for
4203 this case; some cost analysis could be done if both are available
4204 but neither is constant. For now, assume they're equally cheap,
4205 unless one has side effects. If both strings have constant lengths,
4206 use the smaller. */
4208 if (!len1)
4209 len = len2;
4210 else if (!len2)
4211 len = len1;
4212 else if (TREE_SIDE_EFFECTS (len1))
4213 len = len2;
4214 else if (TREE_SIDE_EFFECTS (len2))
4215 len = len1;
4216 else if (TREE_CODE (len1) != INTEGER_CST)
4217 len = len2;
4218 else if (TREE_CODE (len2) != INTEGER_CST)
4219 len = len1;
4220 else if (tree_int_cst_lt (len1, len2))
4221 len = len1;
4222 else
4223 len = len2;
4225 /* If both arguments have side effects, we cannot optimize. */
4226 if (!len || TREE_SIDE_EFFECTS (len))
4227 return NULL_RTX;
4229 /* The actual new length parameter is MIN(len,arg3). */
4230 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4231 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4233 /* If we don't have POINTER_TYPE, call the function. */
4234 if (arg1_align == 0 || arg2_align == 0)
4235 return NULL_RTX;
4237 /* Make a place to write the result of the instruction. */
4238 result = target;
4239 if (! (result != 0
4240 && REG_P (result) && GET_MODE (result) == insn_mode
4241 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4242 result = gen_reg_rtx (insn_mode);
4244 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4245 arg1 = builtin_save_expr (arg1);
4246 arg2 = builtin_save_expr (arg2);
4247 len = builtin_save_expr (len);
4249 arg1_rtx = get_memory_rtx (arg1, len);
4250 arg2_rtx = get_memory_rtx (arg2, len);
4251 arg3_rtx = expand_normal (len);
4252 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4253 GEN_INT (MIN (arg1_align, arg2_align)));
4254 if (insn)
4256 emit_insn (insn);
4258 /* Return the value in the proper mode for this function. */
4259 mode = TYPE_MODE (TREE_TYPE (exp));
4260 if (GET_MODE (result) == mode)
4261 return result;
4262 if (target == 0)
4263 return convert_to_mode (mode, result, 0);
4264 convert_move (target, result, 0);
4265 return target;
4268 /* Expand the library call ourselves using a stabilized argument
4269 list to avoid re-evaluating the function's arguments twice. */
4270 fndecl = get_callee_fndecl (exp);
4271 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4272 arg1, arg2, len);
4273 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4274 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4275 return expand_call (fn, target, target == const0_rtx);
4277 #endif
4278 return NULL_RTX;
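/* Illustrative sketch (editorial, not part of builtins.c): the length
   computation above.  When one operand's length is known, the
   comparison length is clamped to MIN (strlen (s) + 1, n), since bytes
   past the NUL can never influence strncmp's result.  Helper name is
   hypothetical.  */
#include <string.h>

static size_t strncmp_effective_len (const char *s_known, size_t n)
{
  size_t len = strlen (s_known) + 1;   /* include the terminating NUL */
  return len < n ? len : n;            /* MIN (len, n) */
}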
4281 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4282 if that's convenient. */
4284 rtx
4285 expand_builtin_saveregs (void)
4287 rtx val;
4288 rtx_insn *seq;
4290 /* Don't do __builtin_saveregs more than once in a function.
4291 Save the result of the first call and reuse it. */
4292 if (saveregs_value != 0)
4293 return saveregs_value;
4295 /* When this function is called, it means that registers must be
4296 saved on entry to this function. So we migrate the call to the
4297 first insn of this function. */
4299 start_sequence ();
4301 /* Do whatever the machine needs done in this case. */
4302 val = targetm.calls.expand_builtin_saveregs ();
4304 seq = get_insns ();
4305 end_sequence ();
4307 saveregs_value = val;
4309 /* Put the insns after the NOTE that starts the function. If this
4310 is inside a start_sequence, make the outer-level insn chain current, so
4311 the code is placed at the start of the function. */
4312 push_topmost_sequence ();
4313 emit_insn_after (seq, entry_of_function ());
4314 pop_topmost_sequence ();
4316 return val;
4319 /* Expand a call to __builtin_next_arg. */
4321 static rtx
4322 expand_builtin_next_arg (void)
4324 /* Checking arguments is already done in fold_builtin_next_arg
4325 that must be called before this function. */
4326 return expand_binop (ptr_mode, add_optab,
4327 crtl->args.internal_arg_pointer,
4328 crtl->args.arg_offset_rtx,
4329 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4332 /* Make it easier for the backends by protecting the valist argument
4333 from multiple evaluations. */
4335 static tree
4336 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4338 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4340 /* The current way of determining the type of valist is completely
4341 bogus. We should have the information on the va builtin instead. */
4342 if (!vatype)
4343 vatype = targetm.fn_abi_va_list (cfun->decl);
4345 if (TREE_CODE (vatype) == ARRAY_TYPE)
4347 if (TREE_SIDE_EFFECTS (valist))
4348 valist = save_expr (valist);
4350 /* For this case, the backends will be expecting a pointer to
4351 vatype, but it's possible we've actually been given an array
4352 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4353 So fix it. */
4354 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4356 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4357 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4360 else
4362 tree pt = build_pointer_type (vatype);
4364 if (! needs_lvalue)
4366 if (! TREE_SIDE_EFFECTS (valist))
4367 return valist;
4369 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4370 TREE_SIDE_EFFECTS (valist) = 1;
4373 if (TREE_SIDE_EFFECTS (valist))
4374 valist = save_expr (valist);
4375 valist = fold_build2_loc (loc, MEM_REF,
4376 vatype, valist, build_int_cst (pt, 0));
4379 return valist;
4382 /* The "standard" definition of va_list is void*. */
4384 tree
4385 std_build_builtin_va_list (void)
4387 return ptr_type_node;
4390 /* The "standard" abi va_list is va_list_type_node. */
4392 tree
4393 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4395 return va_list_type_node;
4398 /* The "standard" type of va_list is va_list_type_node. */
4400 tree
4401 std_canonical_va_list_type (tree type)
4403 tree wtype, htype;
4405 if (INDIRECT_REF_P (type))
4406 type = TREE_TYPE (type);
4407 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4408 type = TREE_TYPE (type);
4409 wtype = va_list_type_node;
4410 htype = type;
4411 /* Treat structure va_list types. */
4412 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4413 htype = TREE_TYPE (htype);
4414 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4416 /* If va_list is an array type, the argument may have decayed
4417 to a pointer type, e.g. by being passed to another function.
4418 In that case, unwrap both types so that we can compare the
4419 underlying records. */
4420 if (TREE_CODE (htype) == ARRAY_TYPE
4421 || POINTER_TYPE_P (htype))
4423 wtype = TREE_TYPE (wtype);
4424 htype = TREE_TYPE (htype);
4427 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4428 return va_list_type_node;
4430 return NULL_TREE;
4433 /* The "standard" implementation of va_start: just assign `nextarg' to
4434 the variable. */
4436 void
4437 std_expand_builtin_va_start (tree valist, rtx nextarg)
4439 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4440 convert_move (va_r, nextarg, 0);
4442 /* We do not have any valid bounds for the pointer, so
4443 just store zero bounds for it. */
4444 if (chkp_function_instrumented_p (current_function_decl))
4445 chkp_expand_bounds_reset_for_mem (valist,
4446 make_tree (TREE_TYPE (valist),
4447 nextarg));
4450 /* Expand EXP, a call to __builtin_va_start. */
4452 static rtx
4453 expand_builtin_va_start (tree exp)
4455 rtx nextarg;
4456 tree valist;
4457 location_t loc = EXPR_LOCATION (exp);
4459 if (call_expr_nargs (exp) < 2)
4461 error_at (loc, "too few arguments to function %<va_start%>");
4462 return const0_rtx;
4465 if (fold_builtin_next_arg (exp, true))
4466 return const0_rtx;
4468 nextarg = expand_builtin_next_arg ();
4469 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4471 if (targetm.expand_builtin_va_start)
4472 targetm.expand_builtin_va_start (valist, nextarg);
4473 else
4474 std_expand_builtin_va_start (valist, nextarg);
4476 return const0_rtx;
4479 /* Expand EXP, a call to __builtin_va_end. */
4481 static rtx
4482 expand_builtin_va_end (tree exp)
4484 tree valist = CALL_EXPR_ARG (exp, 0);
4486 /* Evaluate for side effects, if needed. I hate macros that don't
4487 do that. */
4488 if (TREE_SIDE_EFFECTS (valist))
4489 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4491 return const0_rtx;
4494 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4495 builtin rather than just as an assignment in stdarg.h because of the
4496 nastiness of array-type va_list types. */
4498 static rtx
4499 expand_builtin_va_copy (tree exp)
4501 tree dst, src, t;
4502 location_t loc = EXPR_LOCATION (exp);
4504 dst = CALL_EXPR_ARG (exp, 0);
4505 src = CALL_EXPR_ARG (exp, 1);
4507 dst = stabilize_va_list_loc (loc, dst, 1);
4508 src = stabilize_va_list_loc (loc, src, 0);
4510 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4512 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4514 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4515 TREE_SIDE_EFFECTS (t) = 1;
4516 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4518 else
4520 rtx dstb, srcb, size;
4522 /* Evaluate to pointers. */
4523 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4524 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4525 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4526 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4528 dstb = convert_memory_address (Pmode, dstb);
4529 srcb = convert_memory_address (Pmode, srcb);
4531 /* "Dereference" to BLKmode memories. */
4532 dstb = gen_rtx_MEM (BLKmode, dstb);
4533 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4534 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4535 srcb = gen_rtx_MEM (BLKmode, srcb);
4536 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4537 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4539 /* Copy. */
4540 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4543 return const0_rtx;
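/* Illustrative sketch (editorial, not part of builtins.c): what the
   two branches above implement at the source level.  For pointer-style
   va_list ABIs, va_copy is a plain assignment; for array-type va_list
   ABIs the whole block must be copied, hence the emit_block_move path.
   Helper name is hypothetical.  */
#include <stdarg.h>

static int read_first_twice (va_list ap)
{
  va_list ap2;
  va_copy (ap2, ap);           /* assignment or block move, per ABI */
  int a = va_arg (ap, int);
  int b = va_arg (ap2, int);   /* reads the same first argument */
  va_end (ap2);
  return a + b;
}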
4546 /* Expand a call to one of the builtin functions __builtin_frame_address or
4547 __builtin_return_address. */
4549 static rtx
4550 expand_builtin_frame_address (tree fndecl, tree exp)
4552 /* The argument must be a nonnegative integer constant.
4553 It counts the number of frames to scan up the stack.
4554 The value is either the frame pointer value or the return
4555 address saved in that frame. */
4556 if (call_expr_nargs (exp) == 0)
4557 /* Warning about missing arg was already issued. */
4558 return const0_rtx;
4559 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4561 error ("invalid argument to %qD", fndecl);
4562 return const0_rtx;
4564 else
4566 /* Number of frames to scan up the stack. */
4567 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4569 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4571 /* Some ports cannot access arbitrary stack frames. */
4572 if (tem == NULL)
4574 warning (0, "unsupported argument to %qD", fndecl);
4575 return const0_rtx;
4578 if (count)
4580 /* Warn since no effort is made to ensure that any frame
4581 beyond the current one exists or can be safely reached. */
4582 warning (OPT_Wframe_address, "calling %qD with "
4583 "a nonzero argument is unsafe", fndecl);
4586 /* For __builtin_frame_address, return what we've got. */
4587 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4588 return tem;
4590 if (!REG_P (tem)
4591 && ! CONSTANT_P (tem))
4592 tem = copy_addr_to_reg (tem);
4593 return tem;
4597 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4598 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4599 is the same as for allocate_dynamic_stack_space. */
4601 static rtx
4602 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4604 rtx op0;
4605 rtx result;
4606 bool valid_arglist;
4607 unsigned int align;
4608 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4609 == BUILT_IN_ALLOCA_WITH_ALIGN);
4611 valid_arglist
4612 = (alloca_with_align
4613 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4614 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4616 if (!valid_arglist)
4617 return NULL_RTX;
4619 /* Compute the argument. */
4620 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4622 /* Compute the alignment. */
4623 align = (alloca_with_align
4624 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4625 : BIGGEST_ALIGNMENT);
4627 /* Allocate the desired space. */
4628 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4629 result = convert_memory_address (ptr_mode, result);
4631 return result;
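/* Illustrative sketch (editorial, not part of builtins.c): the two
   argument forms validated above.  For __builtin_alloca_with_align the
   second argument is the requested alignment in bits, read via
   TREE_INT_CST_LOW.  */
static void alloca_forms_example (void)
{
  void *p = __builtin_alloca (64);                   /* default alignment */
  void *q = __builtin_alloca_with_align (64, 256);   /* 256 bits = 32 bytes */
  (void) p; (void) q;
}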
4634 /* Expand a call to bswap builtin in EXP.
4635 Return NULL_RTX if a normal call should be emitted rather than expanding the
4636 function in-line. If convenient, the result should be placed in TARGET.
4637 SUBTARGET may be used as the target for computing one of EXP's operands. */
4639 static rtx
4640 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4641 rtx subtarget)
4643 tree arg;
4644 rtx op0;
4646 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4647 return NULL_RTX;
4649 arg = CALL_EXPR_ARG (exp, 0);
4650 op0 = expand_expr (arg,
4651 subtarget && GET_MODE (subtarget) == target_mode
4652 ? subtarget : NULL_RTX,
4653 target_mode, EXPAND_NORMAL);
4654 if (GET_MODE (op0) != target_mode)
4655 op0 = convert_to_mode (target_mode, op0, 1);
4657 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4659 gcc_assert (target);
4661 return convert_to_mode (target_mode, target, 1);
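/* Illustrative sketch (editorial, not part of builtins.c): the
   operation the bswap_optab expansion above computes, written out for
   32 bits.  */
#include <stdint.h>

static uint32_t bswap32_reference (uint32_t x)
{
  return (x >> 24)
         | ((x >> 8) & 0x0000ff00u)
         | ((x << 8) & 0x00ff0000u)
         | (x << 24);
}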
4664 /* Expand a call to a unary builtin in EXP.
4665 Return NULL_RTX if a normal call should be emitted rather than expanding the
4666 function in-line. If convenient, the result should be placed in TARGET.
4667 SUBTARGET may be used as the target for computing one of EXP's operands. */
4669 static rtx
4670 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4671 rtx subtarget, optab op_optab)
4673 rtx op0;
4675 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4676 return NULL_RTX;
4678 /* Compute the argument. */
4679 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4680 (subtarget
4681 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4682 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4683 VOIDmode, EXPAND_NORMAL);
4684 /* Compute op, into TARGET if possible.
4685 Set TARGET to wherever the result comes back. */
4686 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4687 op_optab, op0, target, op_optab != clrsb_optab);
4688 gcc_assert (target);
4690 return convert_to_mode (target_mode, target, 0);
4693 /* Expand a call to __builtin_expect. We just return our argument
4694 as the builtin_expect semantic should've been already executed by
4695 the tree branch prediction pass. */
4697 static rtx
4698 expand_builtin_expect (tree exp, rtx target)
4700 tree arg;
4702 if (call_expr_nargs (exp) < 2)
4703 return const0_rtx;
4704 arg = CALL_EXPR_ARG (exp, 0);
4706 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4707 /* When guessing was done, the hints should be already stripped away. */
4708 gcc_assert (!flag_guess_branch_prob
4709 || optimize == 0 || seen_error ());
4710 return target;
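/* Illustrative sketch (editorial, not part of builtins.c): typical
   __builtin_expect usage.  By the time the expander above runs, the
   tree branch-prediction pass has already consumed the hint, so only
   the first argument is materialized.  Macro names are hypothetical.  */
#define LIKELY(x)   __builtin_expect (!!(x), 1)
#define UNLIKELY(x) __builtin_expect (!!(x), 0)

static int checked_div (int a, int b)
{
  if (UNLIKELY (b == 0))
    return 0;          /* cold path, laid out off the fast path */
  return a / b;
}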
4713 /* Expand a call to __builtin_assume_aligned. We just return our first
4714 argument as the builtin_assume_aligned semantic should've been already
4715 executed by CCP. */
4717 static rtx
4718 expand_builtin_assume_aligned (tree exp, rtx target)
4720 if (call_expr_nargs (exp) < 2)
4721 return const0_rtx;
4722 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4723 EXPAND_NORMAL);
4724 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4725 && (call_expr_nargs (exp) < 3
4726 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4727 return target;
4730 void
4731 expand_builtin_trap (void)
4733 if (targetm.have_trap ())
4735 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4736 /* For trap insns when not accumulating outgoing args force
4737 REG_ARGS_SIZE note to prevent crossjumping of calls with
4738 different args sizes. */
4739 if (!ACCUMULATE_OUTGOING_ARGS)
4740 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4742 else
4743 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4744 emit_barrier ();
4747 /* Expand a call to __builtin_unreachable. We do nothing except emit
4748 a barrier saying that control flow will not pass here.
4750 It is the responsibility of the program being compiled to ensure
4751 that control flow never reaches __builtin_unreachable. */
4752 static void
4753 expand_builtin_unreachable (void)
4755 emit_barrier ();
4758 /* Expand EXP, a call to fabs, fabsf or fabsl.
4759 Return NULL_RTX if a normal call should be emitted rather than expanding
4760 the function inline. If convenient, the result should be placed
4761 in TARGET. SUBTARGET may be used as the target for computing
4762 the operand. */
4764 static rtx
4765 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4767 machine_mode mode;
4768 tree arg;
4769 rtx op0;
4771 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4772 return NULL_RTX;
4774 arg = CALL_EXPR_ARG (exp, 0);
4775 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4776 mode = TYPE_MODE (TREE_TYPE (arg));
4777 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4778 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4781 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4782 Return NULL_RTX if a normal call should be emitted rather than expanding the
4783 function inline. If convenient, the result should be placed in TARGET.
4784 SUBTARGET may be used as the target for computing the operand. */
4786 static rtx
4787 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4789 rtx op0, op1;
4790 tree arg;
4792 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4793 return NULL_RTX;
4795 arg = CALL_EXPR_ARG (exp, 0);
4796 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4798 arg = CALL_EXPR_ARG (exp, 1);
4799 op1 = expand_normal (arg);
4801 return expand_copysign (op0, op1, target);
4804 /* Expand a call to __builtin___clear_cache. */
4806 static rtx
4807 expand_builtin___clear_cache (tree exp)
4809 if (!targetm.code_for_clear_cache)
4811 #ifdef CLEAR_INSN_CACHE
4812 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4813 does something. Just do the default expansion to a call to
4814 __clear_cache(). */
4815 return NULL_RTX;
4816 #else
4817 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4818 does nothing. There is no need to call it. Do nothing. */
4819 return const0_rtx;
4820 #endif /* CLEAR_INSN_CACHE */
4823 /* We have a "clear_cache" insn, and it will handle everything. */
4824 tree begin, end;
4825 rtx begin_rtx, end_rtx;
4827 /* We must not expand to a library call. If we did, any
4828 fallback library function in libgcc that might contain a call to
4829 __builtin___clear_cache() would recurse infinitely. */
4830 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4832 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4833 return const0_rtx;
4836 if (targetm.have_clear_cache ())
4838 struct expand_operand ops[2];
4840 begin = CALL_EXPR_ARG (exp, 0);
4841 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4843 end = CALL_EXPR_ARG (exp, 1);
4844 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4846 create_address_operand (&ops[0], begin_rtx);
4847 create_address_operand (&ops[1], end_rtx);
4848 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4849 return const0_rtx;
4851 return const0_rtx;
4854 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4856 static rtx
4857 round_trampoline_addr (rtx tramp)
4859 rtx temp, addend, mask;
4861 /* If we don't need too much alignment, we'll have been guaranteed
4862 proper alignment by get_trampoline_type. */
4863 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4864 return tramp;
4866 /* Round address up to desired boundary. */
4867 temp = gen_reg_rtx (Pmode);
4868 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4869 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4871 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4872 temp, 0, OPTAB_LIB_WIDEN);
4873 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4874 temp, 0, OPTAB_LIB_WIDEN);
4876 return tramp;
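/* Illustrative sketch (editorial, not part of builtins.c): the
   add-then-mask rounding emitted above, in plain C.  ALIGN must be a
   power of two; note -align == ~(align - 1).  */
#include <stdint.h>

static uintptr_t round_up_to_alignment (uintptr_t addr, uintptr_t align)
{
  return (addr + (align - 1)) & ~(align - 1);
}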
4879 static rtx
4880 expand_builtin_init_trampoline (tree exp, bool onstack)
4882 tree t_tramp, t_func, t_chain;
4883 rtx m_tramp, r_tramp, r_chain, tmp;
4885 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4886 POINTER_TYPE, VOID_TYPE))
4887 return NULL_RTX;
4889 t_tramp = CALL_EXPR_ARG (exp, 0);
4890 t_func = CALL_EXPR_ARG (exp, 1);
4891 t_chain = CALL_EXPR_ARG (exp, 2);
4893 r_tramp = expand_normal (t_tramp);
4894 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4895 MEM_NOTRAP_P (m_tramp) = 1;
4897 /* If ONSTACK, the TRAMP argument should be the address of a field
4898 within the local function's FRAME decl. Either way, let's see if
4899 we can fill in the MEM_ATTRs for this memory. */
4900 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4901 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4903 /* Creator of a heap trampoline is responsible for making sure the
4904 address is aligned to at least STACK_BOUNDARY. Normally malloc
4905 will ensure this anyhow. */
4906 tmp = round_trampoline_addr (r_tramp);
4907 if (tmp != r_tramp)
4909 m_tramp = change_address (m_tramp, BLKmode, tmp);
4910 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4911 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4914 /* The FUNC argument should be the address of the nested function.
4915 Extract the actual function decl to pass to the hook. */
4916 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4917 t_func = TREE_OPERAND (t_func, 0);
4918 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4920 r_chain = expand_normal (t_chain);
4922 /* Generate insns to initialize the trampoline. */
4923 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4925 if (onstack)
4927 trampolines_created = 1;
4929 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4930 "trampoline generated for nested function %qD", t_func);
4933 return const0_rtx;
4936 static rtx
4937 expand_builtin_adjust_trampoline (tree exp)
4939 rtx tramp;
4941 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4942 return NULL_RTX;
4944 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4945 tramp = round_trampoline_addr (tramp);
4946 if (targetm.calls.trampoline_adjust_address)
4947 tramp = targetm.calls.trampoline_adjust_address (tramp);
4949 return tramp;
4952 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4953 function. The function first checks whether the back end provides
4954 an insn to implement signbit for the respective mode. If not, it
4955 checks whether the floating point format of the value is such that
4956 the sign bit can be extracted. If that is not the case, error out.
4957 EXP is the expression that is a call to the builtin function; if
4958 convenient, the result should be placed in TARGET. */
4959 static rtx
4960 expand_builtin_signbit (tree exp, rtx target)
4962 const struct real_format *fmt;
4963 machine_mode fmode, imode, rmode;
4964 tree arg;
4965 int word, bitpos;
4966 enum insn_code icode;
4967 rtx temp;
4968 location_t loc = EXPR_LOCATION (exp);
4970 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4971 return NULL_RTX;
4973 arg = CALL_EXPR_ARG (exp, 0);
4974 fmode = TYPE_MODE (TREE_TYPE (arg));
4975 rmode = TYPE_MODE (TREE_TYPE (exp));
4976 fmt = REAL_MODE_FORMAT (fmode);
4978 arg = builtin_save_expr (arg);
4980 /* Expand the argument yielding a RTX expression. */
4981 temp = expand_normal (arg);
4983 /* Check if the back end provides an insn that handles signbit for the
4984 argument's mode. */
4985 icode = optab_handler (signbit_optab, fmode);
4986 if (icode != CODE_FOR_nothing)
4988 rtx_insn *last = get_last_insn ();
4989 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4990 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4991 return target;
4992 delete_insns_since (last);
4995 /* For floating point formats without a sign bit, implement signbit
4996 as "ARG < 0.0". */
4997 bitpos = fmt->signbit_ro;
4998 if (bitpos < 0)
5000 /* But we can't do this if the format supports signed zero. */
5001 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5003 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5004 build_real (TREE_TYPE (arg), dconst0));
5005 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5008 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5010 imode = int_mode_for_mode (fmode);
5011 gcc_assert (imode != BLKmode);
5012 temp = gen_lowpart (imode, temp);
5014 else
5016 imode = word_mode;
5017 /* Handle targets with different FP word orders. */
5018 if (FLOAT_WORDS_BIG_ENDIAN)
5019 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5020 else
5021 word = bitpos / BITS_PER_WORD;
5022 temp = operand_subword_force (temp, word, fmode);
5023 bitpos = bitpos % BITS_PER_WORD;
5026 /* Force the intermediate word_mode (or narrower) result into a
5027 register. This avoids attempting to create paradoxical SUBREGs
5028 of floating point modes below. */
5029 temp = force_reg (imode, temp);
5031 /* If the bitpos is within the "result mode" lowpart, the operation
5032 can be implemented with a single bitwise AND. Otherwise, we need
5033 a right shift and an AND. */
5035 if (bitpos < GET_MODE_BITSIZE (rmode))
5037 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5039 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5040 temp = gen_lowpart (rmode, temp);
5041 temp = expand_binop (rmode, and_optab, temp,
5042 immed_wide_int_const (mask, rmode),
5043 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5045 else
5047 /* Perform a logical right shift to place the signbit in the least
5048 significant bit, then truncate the result to the desired mode
5049 and mask just this bit. */
5050 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5051 temp = gen_lowpart (rmode, temp);
5052 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5053 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5056 return temp;
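/* Illustrative sketch (editorial, not part of builtins.c): the integer
   shift-and-mask fallback implemented above, for the common case of a
   32-bit float whose sign occupies bit 31.  Helper name is
   hypothetical.  */
#include <stdint.h>
#include <string.h>

static int signbitf_reference (float f)
{
  uint32_t bits;
  memcpy (&bits, &f, sizeof bits);   /* reinterpret without aliasing UB */
  return (bits >> 31) & 1;           /* logical right shift, then mask */
}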
5059 /* Expand fork or exec calls. TARGET is the desired target of the
5060 call. EXP is the call. FN is the
5061 identifier of the actual function. IGNORE is nonzero if the
5062 value is to be ignored. */
5064 static rtx
5065 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5067 tree id, decl;
5068 tree call;
5070 /* If we are not profiling, just call the function. */
5071 if (!profile_arc_flag)
5072 return NULL_RTX;
5074 /* Otherwise call the wrapper. This should be equivalent for the rest of
5075 the compiler, so the code does not diverge, and the wrapper may run the
5076 code necessary for keeping the profiling sane. */
5078 switch (DECL_FUNCTION_CODE (fn))
5080 case BUILT_IN_FORK:
5081 id = get_identifier ("__gcov_fork");
5082 break;
5084 case BUILT_IN_EXECL:
5085 id = get_identifier ("__gcov_execl");
5086 break;
5088 case BUILT_IN_EXECV:
5089 id = get_identifier ("__gcov_execv");
5090 break;
5092 case BUILT_IN_EXECLP:
5093 id = get_identifier ("__gcov_execlp");
5094 break;
5096 case BUILT_IN_EXECLE:
5097 id = get_identifier ("__gcov_execle");
5098 break;
5100 case BUILT_IN_EXECVP:
5101 id = get_identifier ("__gcov_execvp");
5102 break;
5104 case BUILT_IN_EXECVE:
5105 id = get_identifier ("__gcov_execve");
5106 break;
5108 default:
5109 gcc_unreachable ();
5112 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5113 FUNCTION_DECL, id, TREE_TYPE (fn));
5114 DECL_EXTERNAL (decl) = 1;
5115 TREE_PUBLIC (decl) = 1;
5116 DECL_ARTIFICIAL (decl) = 1;
5117 TREE_NOTHROW (decl) = 1;
5118 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5119 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5120 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5121 return expand_call (call, target, ignore);
5126 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5127 the pointer in these functions is void*, the tree optimizers may remove
5128 casts. The mode computed in expand_builtin isn't reliable either, due
5129 to __sync_bool_compare_and_swap.
5131 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5132 group of builtins. This gives us log2 of the mode size. */
5134 static inline machine_mode
5135 get_builtin_sync_mode (int fcode_diff)
5137 /* The size is not negotiable, so ask not to get BLKmode in return
5138 if the target indicates that a smaller size would be better. */
5139 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
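/* Illustrative sketch (editorial, not part of builtins.c): FCODE_DIFF
   is the log2 of the access size in bytes, so the _1.._16 builtin
   variants map to 1 << 0 .. 1 << 4 bytes.  */
static unsigned int sync_access_size (int fcode_diff)
{
  return 1u << fcode_diff;   /* e.g. a __sync_..._4 builtin has diff 2 -> 4 bytes */
}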
5142 /* Expand the memory expression LOC and return the appropriate memory operand
5143 for the builtin_sync operations. */
5145 static rtx
5146 get_builtin_sync_mem (tree loc, machine_mode mode)
5148 rtx addr, mem;
5150 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5151 addr = convert_memory_address (Pmode, addr);
5153 /* Note that we explicitly do not want any alias information for this
5154 memory, so that we kill all other live memories. Otherwise we don't
5155 satisfy the full barrier semantics of the intrinsic. */
5156 mem = validize_mem (gen_rtx_MEM (mode, addr));
5158 /* The alignment needs to be at least that of the mode. */
5159 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5160 get_pointer_alignment (loc)));
5161 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5162 MEM_VOLATILE_P (mem) = 1;
5164 return mem;
5167 /* Make sure an argument is in the right mode.
5168 EXP is the tree argument.
5169 MODE is the mode it should be in. */
5171 static rtx
5172 expand_expr_force_mode (tree exp, machine_mode mode)
5174 rtx val;
5175 machine_mode old_mode;
5177 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5178 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5179 of CONST_INTs, where we know the old_mode only from the call argument. */
5181 old_mode = GET_MODE (val);
5182 if (old_mode == VOIDmode)
5183 old_mode = TYPE_MODE (TREE_TYPE (exp));
5184 val = convert_modes (mode, old_mode, val, 1);
5185 return val;
5189 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5190 EXP is the CALL_EXPR. CODE is the rtx code
5191 that corresponds to the arithmetic or logical operation from the name;
5192 an exception here is that NOT actually means NAND. TARGET is an optional
5193 place for us to store the results; AFTER is true if this is the
5194 fetch_and_xxx form. */
5196 static rtx
5197 expand_builtin_sync_operation (machine_mode mode, tree exp,
5198 enum rtx_code code, bool after,
5199 rtx target)
5201 rtx val, mem;
5202 location_t loc = EXPR_LOCATION (exp);
5204 if (code == NOT && warn_sync_nand)
5206 tree fndecl = get_callee_fndecl (exp);
5207 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5209 static bool warned_f_a_n, warned_n_a_f;
5211 switch (fcode)
5213 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5214 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5215 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5216 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5217 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5218 if (warned_f_a_n)
5219 break;
5221 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5222 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5223 warned_f_a_n = true;
5224 break;
5226 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5227 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5228 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5229 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5230 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5231 if (warned_n_a_f)
5232 break;
5234 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5235 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5236 warned_n_a_f = true;
5237 break;
5239 default:
5240 gcc_unreachable ();
5244 /* Expand the operands. */
5245 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5246 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5248 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5249 after);
5252 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5253 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5254 true if this is the boolean form. TARGET is a place for us to store the
5255 results; this is NOT optional if IS_BOOL is true. */
5257 static rtx
5258 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5259 bool is_bool, rtx target)
5261 rtx old_val, new_val, mem;
5262 rtx *pbool, *poval;
5264 /* Expand the operands. */
5265 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5266 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5267 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5269 pbool = poval = NULL;
5270 if (target != const0_rtx)
5272 if (is_bool)
5273 pbool = &target;
5274 else
5275 poval = &target;
5277 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5278 false, MEMMODEL_SYNC_SEQ_CST,
5279 MEMMODEL_SYNC_SEQ_CST))
5280 return NULL_RTX;
5282 return target;
5285 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5286 general form is actually an atomic exchange, and some targets only
5287 support a reduced form with the second argument being a constant 1.
5288 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5289 the results. */
5291 static rtx
5292 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5293 rtx target)
5295 rtx val, mem;
5297 /* Expand the operands. */
5298 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5299 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5301 return expand_sync_lock_test_and_set (target, mem, val);
5304 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5306 static void
5307 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5309 rtx mem;
5311 /* Expand the operands. */
5312 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5314 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5317 /* Given an integer representing an ``enum memmodel'', verify its
5318 correctness and return the memory model enum. */
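/* Illustrative sketch, not original to this file: how source-level model
   arguments are normalized here (pick_model is a hypothetical function
   returning a run time model value).

     int a = __atomic_load_n (&v, __ATOMIC_CONSUME);  // treated as ACQUIRE
     int b = __atomic_load_n (&v, pick_model ());     // non-constant: treated
                                                      // as SEQ_CST  */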
5320 static enum memmodel
5321 get_memmodel (tree exp)
5323 rtx op;
5324 unsigned HOST_WIDE_INT val;
5326 /* If the parameter is not a constant, it's a run time value so we'll just
5327 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5328 if (TREE_CODE (exp) != INTEGER_CST)
5329 return MEMMODEL_SEQ_CST;
5331 op = expand_normal (exp);
5333 val = INTVAL (op);
5334 if (targetm.memmodel_check)
5335 val = targetm.memmodel_check (val);
5336 else if (val & ~MEMMODEL_MASK)
5338 warning (OPT_Winvalid_memory_model,
5339 "Unknown architecture specifier in memory model to builtin.");
5340 return MEMMODEL_SEQ_CST;
5343 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
5344 if (memmodel_base (val) >= MEMMODEL_LAST)
5346 warning (OPT_Winvalid_memory_model,
5347 "invalid memory model argument to builtin");
5348 return MEMMODEL_SEQ_CST;
5351 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5352 be conservative and promote consume to acquire. */
5353 if (val == MEMMODEL_CONSUME)
5354 val = MEMMODEL_ACQUIRE;
5356 return (enum memmodel) val;
5359 /* Expand the __atomic_exchange intrinsic:
5360 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5361 EXP is the CALL_EXPR.
5362 TARGET is an optional place for us to store the results. */
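/* Illustrative sketch, not original to this file: a sequentially consistent
   swap at source level, assuming an int object.

     int v, prev;
     prev = __atomic_exchange_n (&v, 42, __ATOMIC_SEQ_CST);  */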
5364 static rtx
5365 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5367 rtx val, mem;
5368 enum memmodel model;
5370 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5372 if (!flag_inline_atomics)
5373 return NULL_RTX;
5375 /* Expand the operands. */
5376 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5377 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5379 return expand_atomic_exchange (target, mem, val, model);
5382 /* Expand the __atomic_compare_exchange intrinsic:
5383 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5384 TYPE desired, BOOL weak,
5385 enum memmodel success,
5386 enum memmodel failure)
5387 EXP is the CALL_EXPR.
5388 TARGET is an optional place for us to store the results. */
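/* Illustrative sketch, not original to this file: a typical CAS loop, using
   a failure model that satisfies the checks below (not RELEASE/ACQ_REL and
   no stronger than the success model).

     int expected = __atomic_load_n (&v, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (&v, &expected, expected + 1,
                                          0,                  // weak == false
                                          __ATOMIC_ACQ_REL,   // success model
                                          __ATOMIC_ACQUIRE))  // failure model
       ;  // EXPECTED was refreshed with the current value; retry.  */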
5390 static rtx
5391 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5392 rtx target)
5394 rtx expect, desired, mem, oldval;
5395 rtx_code_label *label;
5396 enum memmodel success, failure;
5397 tree weak;
5398 bool is_weak;
5400 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5401 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5403 if (failure > success)
5405 warning (OPT_Winvalid_memory_model,
5406 "failure memory model cannot be stronger than success memory "
5407 "model for %<__atomic_compare_exchange%>");
5408 success = MEMMODEL_SEQ_CST;
5411 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5413 warning (OPT_Winvalid_memory_model,
5414 "invalid failure memory model for "
5415 "%<__atomic_compare_exchange%>");
5416 failure = MEMMODEL_SEQ_CST;
5417 success = MEMMODEL_SEQ_CST;
5421 if (!flag_inline_atomics)
5422 return NULL_RTX;
5424 /* Expand the operands. */
5425 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5427 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5428 expect = convert_memory_address (Pmode, expect);
5429 expect = gen_rtx_MEM (mode, expect);
5430 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5432 weak = CALL_EXPR_ARG (exp, 3);
5433 is_weak = false;
5434 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5435 is_weak = true;
5437 if (target == const0_rtx)
5438 target = NULL;
5440 /* Lest the rtl backend create a race condition with an improper store
5441 to memory, always create a new pseudo for OLDVAL. */
5442 oldval = NULL;
5444 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5445 is_weak, success, failure))
5446 return NULL_RTX;
5448 /* Conditionally store back to EXPECT, lest we create a race condition
5449 with an improper store to memory. */
5450 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5451 the normal case where EXPECT is totally private, i.e. a register. At
5452 which point the store can be unconditional. */
5453 label = gen_label_rtx ();
5454 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5455 GET_MODE (target), 1, label);
5456 emit_move_insn (expect, oldval);
5457 emit_label (label);
5459 return target;
5462 /* Expand the __atomic_load intrinsic:
5463 TYPE __atomic_load (TYPE *object, enum memmodel)
5464 EXP is the CALL_EXPR.
5465 TARGET is an optional place for us to store the results. */
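/* Illustrative sketch, not original to this file: RELEASE and ACQ_REL make
   no sense for a pure load, so the check below warns and uses SEQ_CST.

     int a = __atomic_load_n (&v, __ATOMIC_ACQUIRE);  // accepted
     int b = __atomic_load_n (&v, __ATOMIC_RELEASE);  // warns, made SEQ_CST  */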
5467 static rtx
5468 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5470 rtx mem;
5471 enum memmodel model;
5473 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5474 if (is_mm_release (model) || is_mm_acq_rel (model))
5476 warning (OPT_Winvalid_memory_model,
5477 "invalid memory model for %<__atomic_load%>");
5478 model = MEMMODEL_SEQ_CST;
5481 if (!flag_inline_atomics)
5482 return NULL_RTX;
5484 /* Expand the operand. */
5485 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5487 return expand_atomic_load (target, mem, model);
5491 /* Expand the __atomic_store intrinsic:
5492 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5493 EXP is the CALL_EXPR. */
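/* Illustrative sketch, not original to this file: only RELAXED, SEQ_CST and
   RELEASE are valid for a store; anything else warns and becomes SEQ_CST.

     __atomic_store_n (&v, 1, __ATOMIC_RELEASE);  // accepted
     __atomic_store_n (&v, 1, __ATOMIC_ACQUIRE);  // warns, made SEQ_CST  */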
5496 static rtx
5497 expand_builtin_atomic_store (machine_mode mode, tree exp)
5499 rtx mem, val;
5500 enum memmodel model;
5502 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5503 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5504 || is_mm_release (model)))
5506 warning (OPT_Winvalid_memory_model,
5507 "invalid memory model for %<__atomic_store%>");
5508 model = MEMMODEL_SEQ_CST;
5511 if (!flag_inline_atomics)
5512 return NULL_RTX;
5514 /* Expand the operands. */
5515 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5516 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5518 return expand_atomic_store (mem, val, model, false);
5521 /* Expand the __atomic_fetch_XXX intrinsic:
5522 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5523 EXP is the CALL_EXPR.
5524 TARGET is an optional place for us to store the results.
5525 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5526 FETCH_AFTER is true if the builtin returns the result of the operation,
5527 and false if it returns the value in memory before the operation.
5528 IGNORE is true if the result is not used.
5529 EXT_CALL is the correct builtin for an external call if this cannot be
5530 resolved to an instruction sequence. */
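/* Illustrative sketch, not original to this file: the external
   __atomic_fetch_OP routine always returns the value that was in memory
   BEFORE the operation, so when the source used the OP-and-fetch form the
   code below recomputes the after-value from the library call's result:

     after = before OP val;    // PLUS, MINUS, AND, XOR, IOR
     after = ~(before & val);  // the NOT (NAND) case

   where VAL is the expanded second argument of the builtin.  */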
5532 static rtx
5533 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5534 enum rtx_code code, bool fetch_after,
5535 bool ignore, enum built_in_function ext_call)
5537 rtx val, mem, ret;
5538 enum memmodel model;
5539 tree fndecl;
5540 tree addr;
5542 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5544 /* Expand the operands. */
5545 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5546 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5548 /* Only try generating instructions if inlining is turned on. */
5549 if (flag_inline_atomics)
5551 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5552 if (ret)
5553 return ret;
5556 /* Return if a different routine isn't needed for the library call. */
5557 if (ext_call == BUILT_IN_NONE)
5558 return NULL_RTX;
5560 /* Change the call to the specified function. */
5561 fndecl = get_callee_fndecl (exp);
5562 addr = CALL_EXPR_FN (exp);
5563 STRIP_NOPS (addr);
5565 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5566 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5568 /* Expand the call here so we can emit trailing code. */
5569 ret = expand_call (exp, target, ignore);
5571 /* Replace the original function just in case it matters. */
5572 TREE_OPERAND (addr, 0) = fndecl;
5574 /* Then issue the arithmetic correction to return the right result. */
5575 if (!ignore)
5577 if (code == NOT)
5579 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5580 OPTAB_LIB_WIDEN);
5581 ret = expand_simple_unop (mode, NOT, ret, target, true);
5583 else
5584 ret = expand_simple_binop (mode, code, ret, val, target, true,
5585 OPTAB_LIB_WIDEN);
5587 return ret;
5590 /* Expand an atomic clear operation.
5591 void __atomic_clear (BOOL *obj, enum memmodel)
5592 EXP is the call expression. */
5594 static rtx
5595 expand_builtin_atomic_clear (tree exp)
5597 machine_mode mode;
5598 rtx mem, ret;
5599 enum memmodel model;
5601 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5602 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5603 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5605 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5607 warning (OPT_Winvalid_memory_model,
5608 "invalid memory model for %<__atomic_store%>");
5609 model = MEMMODEL_SEQ_CST;
5612 /* Try issuing an __atomic_store, allowing fallback to __sync_lock_release;
5613 failing that, fall through and emit a plain store below. The only way
5614 this can fail is if the bool type is larger than a word size. Unlikely,
5615 but handle it anyway for completeness. Assume a single threaded model
5616 since there is no atomic support in this case, and no barriers are required. */
5617 ret = expand_atomic_store (mem, const0_rtx, model, true);
5618 if (!ret)
5619 emit_move_insn (mem, const0_rtx);
5620 return const0_rtx;
5623 /* Expand an atomic test_and_set operation.
5624 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5625 EXP is the call expression. */
5627 static rtx
5628 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5630 rtx mem;
5631 enum memmodel model;
5632 machine_mode mode;
5634 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5635 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5636 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5638 return expand_atomic_test_and_set (target, mem, model);
5642 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5643 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
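/* Illustrative sketch, not original to this file: typical folding results,
   assuming a 32-bit int, natural alignment, and a target CAS pattern.

     _Bool a = __atomic_always_lock_free (sizeof (int), 0);  // folds to 1
     char buf[4];
     _Bool b = __atomic_always_lock_free (4, buf);  // char-aligned object:
                                                    // folds to 0  */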
5645 static tree
5646 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5648 int size;
5649 machine_mode mode;
5650 unsigned int mode_align, type_align;
5652 if (TREE_CODE (arg0) != INTEGER_CST)
5653 return NULL_TREE;
5655 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5656 mode = mode_for_size (size, MODE_INT, 0);
5657 mode_align = GET_MODE_ALIGNMENT (mode);
5659 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5660 type_align = mode_align;
5661 else
5663 tree ttype = TREE_TYPE (arg1);
5665 /* This function is usually invoked and folded immediately by the front
5666 end before anything else has a chance to look at it. The pointer
5667 parameter at this point is usually cast to a void *, so check for that
5668 and look past the cast. */
5669 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5670 && VOID_TYPE_P (TREE_TYPE (ttype)))
5671 arg1 = TREE_OPERAND (arg1, 0);
5673 ttype = TREE_TYPE (arg1);
5674 gcc_assert (POINTER_TYPE_P (ttype));
5676 /* Get the underlying type of the object. */
5677 ttype = TREE_TYPE (ttype);
5678 type_align = TYPE_ALIGN (ttype);
5681 /* If the object has smaller alignment, the lock free routines cannot
5682 be used. */
5683 if (type_align < mode_align)
5684 return boolean_false_node;
5686 /* Check if a compare_and_swap pattern exists for the mode which represents
5687 the required size. The pattern is not allowed to fail, so the existence
5688 of the pattern indicates support is present. */
5689 if (can_compare_and_swap_p (mode, true))
5690 return boolean_true_node;
5691 else
5692 return boolean_false_node;
5695 /* Return true if the parameters to call EXP represent an object which will
5696 always generate lock free instructions. The first argument represents the
5697 size of the object, and the second parameter is a pointer to the object
5698 itself. If NULL is passed for the object, then the result is based on
5699 typical alignment for an object of the specified size. Return false if
5700 this cannot be determined. */
5702 static rtx
5703 expand_builtin_atomic_always_lock_free (tree exp)
5705 tree size;
5706 tree arg0 = CALL_EXPR_ARG (exp, 0);
5707 tree arg1 = CALL_EXPR_ARG (exp, 1);
5709 if (TREE_CODE (arg0) != INTEGER_CST)
5711 error ("non-constant argument 1 to __atomic_always_lock_free");
5712 return const0_rtx;
5715 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5716 if (size == boolean_true_node)
5717 return const1_rtx;
5718 return const0_rtx;
5721 /* Return one (boolean_true_node) if it can be determined that object ARG1
5722 of size ARG0 is always lock free on this architecture; else NULL_TREE. */
5724 static tree
5725 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5727 if (!flag_inline_atomics)
5728 return NULL_TREE;
5730 /* If it isn't always lock free, don't generate a result. */
5731 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5732 return boolean_true_node;
5734 return NULL_TREE;
5737 /* Return one if it can be determined that the object described by call EXP
5738 is lock free. The first argument is the size of the object, and the
5739 second parameter is a pointer to the object itself. If NULL is passed
5740 for the object, then the result is based on typical alignment for an
5741 object of the specified size. If the answer is not known at compile
5742 time, return NULL. */
5744 static rtx
5745 expand_builtin_atomic_is_lock_free (tree exp)
5747 tree size;
5748 tree arg0 = CALL_EXPR_ARG (exp, 0);
5749 tree arg1 = CALL_EXPR_ARG (exp, 1);
5751 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5753 error ("non-integer argument 1 to __atomic_is_lock_free");
5754 return NULL_RTX;
5757 if (!flag_inline_atomics)
5758 return NULL_RTX;
5760 /* If the value is known at compile time, return the RTX for it. */
5761 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5762 if (size == boolean_true_node)
5763 return const1_rtx;
5765 return NULL_RTX;
5768 /* Expand the __atomic_thread_fence intrinsic:
5769 void __atomic_thread_fence (enum memmodel)
5770 EXP is the CALL_EXPR. */
5772 static void
5773 expand_builtin_atomic_thread_fence (tree exp)
5775 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5776 expand_mem_thread_fence (model);
5779 /* Expand the __atomic_signal_fence intrinsic:
5780 void __atomic_signal_fence (enum memmodel)
5781 EXP is the CALL_EXPR. */
5783 static void
5784 expand_builtin_atomic_signal_fence (tree exp)
5786 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5787 expand_mem_signal_fence (model);
5790 /* Expand the __sync_synchronize intrinsic. */
5792 static void
5793 expand_builtin_sync_synchronize (void)
5795 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5798 static rtx
5799 expand_builtin_thread_pointer (tree exp, rtx target)
5801 enum insn_code icode;
5802 if (!validate_arglist (exp, VOID_TYPE))
5803 return const0_rtx;
5804 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5805 if (icode != CODE_FOR_nothing)
5807 struct expand_operand op;
5808 /* If the target is not suitable then create a new target. */
5809 if (target == NULL_RTX
5810 || !REG_P (target)
5811 || GET_MODE (target) != Pmode)
5812 target = gen_reg_rtx (Pmode);
5813 create_output_operand (&op, target, Pmode);
5814 expand_insn (icode, 1, &op);
5815 return target;
5817 error ("__builtin_thread_pointer is not supported on this target");
5818 return const0_rtx;
5821 static void
5822 expand_builtin_set_thread_pointer (tree exp)
5824 enum insn_code icode;
5825 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5826 return;
5827 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5828 if (icode != CODE_FOR_nothing)
5830 struct expand_operand op;
5831 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5832 Pmode, EXPAND_NORMAL);
5833 create_input_operand (&op, val, Pmode);
5834 expand_insn (icode, 1, &op);
5835 return;
5837 error ("__builtin_set_thread_pointer is not supported on this target");
5841 /* Emit code to restore the current value of stack. */
5843 static void
5844 expand_stack_restore (tree var)
5846 rtx_insn *prev;
5847 rtx sa = expand_normal (var);
5849 sa = convert_memory_address (Pmode, sa);
5851 prev = get_last_insn ();
5852 emit_stack_restore (SAVE_BLOCK, sa);
5854 record_new_stack_level ();
5856 fixup_args_size_notes (prev, get_last_insn (), 0);
5859 /* Emit code to save the current value of stack. */
5861 static rtx
5862 expand_stack_save (void)
5864 rtx ret = NULL_RTX;
5866 emit_stack_save (SAVE_BLOCK, &ret);
5867 return ret;
5871 /* Expand OpenACC acc_on_device.
5873 This has to happen late (that is, not in early folding; expand_builtin_*,
5874 rather than fold_builtin_*), as we have to act differently for host and
5875 acceleration device (ACCEL_COMPILER conditional). */
5877 static rtx
5878 expand_builtin_acc_on_device (tree exp, rtx target)
5880 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5881 return NULL_RTX;
5883 tree arg = CALL_EXPR_ARG (exp, 0);
5885 /* Return (arg == v1 || arg == v2) ? 1 : 0. */
5886 machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
5887 rtx v = expand_normal (arg), v1, v2;
5888 #ifdef ACCEL_COMPILER
5889 v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
5890 v2 = GEN_INT (ACCEL_COMPILER_acc_device);
5891 #else
5892 v1 = GEN_INT (GOMP_DEVICE_NONE);
5893 v2 = GEN_INT (GOMP_DEVICE_HOST);
5894 #endif
5895 machine_mode target_mode = TYPE_MODE (integer_type_node);
5896 if (!target || !register_operand (target, target_mode))
5897 target = gen_reg_rtx (target_mode);
5898 emit_move_insn (target, const1_rtx);
5899 rtx_code_label *done_label = gen_label_rtx ();
5900 do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
5901 NULL, done_label, PROB_EVEN);
5902 do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
5903 NULL, done_label, PROB_EVEN);
5904 emit_move_insn (target, const0_rtx);
5905 emit_label (done_label);
5907 return target;
5911 /* Expand an expression EXP that calls a built-in function,
5912 with result going to TARGET if that's convenient
5913 (and in mode MODE if that's convenient).
5914 SUBTARGET may be used as the target for computing one of EXP's operands.
5915 IGNORE is nonzero if the value is to be ignored. */
5917 rtx
5918 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5919 int ignore)
5921 tree fndecl = get_callee_fndecl (exp);
5922 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5923 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5924 int flags;
5926 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5927 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5929 /* When ASan is enabled, we don't want to expand some memory/string
5930 builtins and rely on libsanitizer's hooks. This allows us to avoid
5931 redundant checks and be sure, that possible overflow will be detected
5932 by ASan. */
5934 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5935 return expand_call (exp, target, ignore);
5937 /* When not optimizing, generate calls to library functions for a certain
5938 set of builtins. */
5939 if (!optimize
5940 && !called_as_built_in (fndecl)
5941 && fcode != BUILT_IN_FORK
5942 && fcode != BUILT_IN_EXECL
5943 && fcode != BUILT_IN_EXECV
5944 && fcode != BUILT_IN_EXECLP
5945 && fcode != BUILT_IN_EXECLE
5946 && fcode != BUILT_IN_EXECVP
5947 && fcode != BUILT_IN_EXECVE
5948 && fcode != BUILT_IN_ALLOCA
5949 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5950 && fcode != BUILT_IN_FREE
5951 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5952 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5953 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5954 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5955 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5956 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5957 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5958 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5959 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5960 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5961 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5962 && fcode != BUILT_IN_CHKP_BNDRET)
5963 return expand_call (exp, target, ignore);
5965 /* The built-in function expanders test for target == const0_rtx
5966 to determine whether the function's result will be ignored. */
5967 if (ignore)
5968 target = const0_rtx;
5970 /* If the result of a pure or const built-in function is ignored, and
5971 none of its arguments are volatile, we can avoid expanding the
5972 built-in call and just evaluate the arguments for side-effects. */
5973 if (target == const0_rtx
5974 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5975 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5977 bool volatilep = false;
5978 tree arg;
5979 call_expr_arg_iterator iter;
5981 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5982 if (TREE_THIS_VOLATILE (arg))
5984 volatilep = true;
5985 break;
5988 if (! volatilep)
5990 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5991 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5992 return const0_rtx;
5996 /* expand_builtin_with_bounds is supposed to be used for
5997 instrumented builtin calls. */
5998 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6000 switch (fcode)
6002 CASE_FLT_FN (BUILT_IN_FABS):
6003 case BUILT_IN_FABSD32:
6004 case BUILT_IN_FABSD64:
6005 case BUILT_IN_FABSD128:
6006 target = expand_builtin_fabs (exp, target, subtarget);
6007 if (target)
6008 return target;
6009 break;
6011 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6012 target = expand_builtin_copysign (exp, target, subtarget);
6013 if (target)
6014 return target;
6015 break;
6017 /* Just do a normal library call if we were unable to fold
6018 the values. */
6019 CASE_FLT_FN (BUILT_IN_CABS):
6020 break;
6022 CASE_FLT_FN (BUILT_IN_EXP):
6023 CASE_FLT_FN (BUILT_IN_EXP10):
6024 CASE_FLT_FN (BUILT_IN_POW10):
6025 CASE_FLT_FN (BUILT_IN_EXP2):
6026 CASE_FLT_FN (BUILT_IN_EXPM1):
6027 CASE_FLT_FN (BUILT_IN_LOGB):
6028 CASE_FLT_FN (BUILT_IN_LOG):
6029 CASE_FLT_FN (BUILT_IN_LOG10):
6030 CASE_FLT_FN (BUILT_IN_LOG2):
6031 CASE_FLT_FN (BUILT_IN_LOG1P):
6032 CASE_FLT_FN (BUILT_IN_TAN):
6033 CASE_FLT_FN (BUILT_IN_ASIN):
6034 CASE_FLT_FN (BUILT_IN_ACOS):
6035 CASE_FLT_FN (BUILT_IN_ATAN):
6036 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6037 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6038 because of possible accuracy problems. */
6039 if (! flag_unsafe_math_optimizations)
6040 break;
6041 CASE_FLT_FN (BUILT_IN_SQRT):
6042 CASE_FLT_FN (BUILT_IN_FLOOR):
6043 CASE_FLT_FN (BUILT_IN_CEIL):
6044 CASE_FLT_FN (BUILT_IN_TRUNC):
6045 CASE_FLT_FN (BUILT_IN_ROUND):
6046 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6047 CASE_FLT_FN (BUILT_IN_RINT):
6048 target = expand_builtin_mathfn (exp, target, subtarget);
6049 if (target)
6050 return target;
6051 break;
6053 CASE_FLT_FN (BUILT_IN_FMA):
6054 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6055 if (target)
6056 return target;
6057 break;
6059 CASE_FLT_FN (BUILT_IN_ILOGB):
6060 if (! flag_unsafe_math_optimizations)
6061 break;
6062 CASE_FLT_FN (BUILT_IN_ISINF):
6063 CASE_FLT_FN (BUILT_IN_FINITE):
6064 case BUILT_IN_ISFINITE:
6065 case BUILT_IN_ISNORMAL:
6066 target = expand_builtin_interclass_mathfn (exp, target);
6067 if (target)
6068 return target;
6069 break;
6071 CASE_FLT_FN (BUILT_IN_ICEIL):
6072 CASE_FLT_FN (BUILT_IN_LCEIL):
6073 CASE_FLT_FN (BUILT_IN_LLCEIL):
6074 CASE_FLT_FN (BUILT_IN_LFLOOR):
6075 CASE_FLT_FN (BUILT_IN_IFLOOR):
6076 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6077 target = expand_builtin_int_roundingfn (exp, target);
6078 if (target)
6079 return target;
6080 break;
6082 CASE_FLT_FN (BUILT_IN_IRINT):
6083 CASE_FLT_FN (BUILT_IN_LRINT):
6084 CASE_FLT_FN (BUILT_IN_LLRINT):
6085 CASE_FLT_FN (BUILT_IN_IROUND):
6086 CASE_FLT_FN (BUILT_IN_LROUND):
6087 CASE_FLT_FN (BUILT_IN_LLROUND):
6088 target = expand_builtin_int_roundingfn_2 (exp, target);
6089 if (target)
6090 return target;
6091 break;
6093 CASE_FLT_FN (BUILT_IN_POWI):
6094 target = expand_builtin_powi (exp, target);
6095 if (target)
6096 return target;
6097 break;
6099 CASE_FLT_FN (BUILT_IN_ATAN2):
6100 CASE_FLT_FN (BUILT_IN_LDEXP):
6101 CASE_FLT_FN (BUILT_IN_SCALB):
6102 CASE_FLT_FN (BUILT_IN_SCALBN):
6103 CASE_FLT_FN (BUILT_IN_SCALBLN):
6104 if (! flag_unsafe_math_optimizations)
6105 break;
6107 CASE_FLT_FN (BUILT_IN_FMOD):
6108 CASE_FLT_FN (BUILT_IN_REMAINDER):
6109 CASE_FLT_FN (BUILT_IN_DREM):
6110 CASE_FLT_FN (BUILT_IN_POW):
6111 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6112 if (target)
6113 return target;
6114 break;
6116 CASE_FLT_FN (BUILT_IN_CEXPI):
6117 target = expand_builtin_cexpi (exp, target);
6118 gcc_assert (target);
6119 return target;
6121 CASE_FLT_FN (BUILT_IN_SIN):
6122 CASE_FLT_FN (BUILT_IN_COS):
6123 if (! flag_unsafe_math_optimizations)
6124 break;
6125 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6126 if (target)
6127 return target;
6128 break;
6130 CASE_FLT_FN (BUILT_IN_SINCOS):
6131 if (! flag_unsafe_math_optimizations)
6132 break;
6133 target = expand_builtin_sincos (exp);
6134 if (target)
6135 return target;
6136 break;
6138 case BUILT_IN_APPLY_ARGS:
6139 return expand_builtin_apply_args ();
6141 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6142 FUNCTION with a copy of the parameters described by
6143 ARGUMENTS, and ARGSIZE. It returns a block of memory
6144 allocated on the stack into which is stored all the registers
6145 that might possibly be used for returning the result of a
6146 function. ARGUMENTS is the value returned by
6147 __builtin_apply_args. ARGSIZE is the number of bytes of
6148 arguments that must be copied. ??? How should this value be
6149 computed? We'll also need a safe worst case value for varargs
6150 functions. */
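/* Illustrative sketch, not original to this file: forwarding a call with
   these builtins, assuming a hypothetical target_fn with a compatible ABI
   and a fixed worst-case ARGSIZE of 64 bytes.

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*)()) target_fn, args, 64);
     __builtin_return (ret);  */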
6151 case BUILT_IN_APPLY:
6152 if (!validate_arglist (exp, POINTER_TYPE,
6153 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6154 && !validate_arglist (exp, REFERENCE_TYPE,
6155 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6156 return const0_rtx;
6157 else
6159 rtx ops[3];
6161 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6162 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6163 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6165 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6168 /* __builtin_return (RESULT) causes the function to return the
6169 value described by RESULT. RESULT is address of the block of
6170 memory returned by __builtin_apply. */
6171 case BUILT_IN_RETURN:
6172 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6173 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6174 return const0_rtx;
6176 case BUILT_IN_SAVEREGS:
6177 return expand_builtin_saveregs ();
6179 case BUILT_IN_VA_ARG_PACK:
6180 /* All valid uses of __builtin_va_arg_pack () are removed during
6181 inlining. */
6182 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6183 return const0_rtx;
6185 case BUILT_IN_VA_ARG_PACK_LEN:
6186 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6187 inlining. */
6188 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6189 return const0_rtx;
6191 /* Return the address of the first anonymous stack arg. */
6192 case BUILT_IN_NEXT_ARG:
6193 if (fold_builtin_next_arg (exp, false))
6194 return const0_rtx;
6195 return expand_builtin_next_arg ();
6197 case BUILT_IN_CLEAR_CACHE:
6198 target = expand_builtin___clear_cache (exp);
6199 if (target)
6200 return target;
6201 break;
6203 case BUILT_IN_CLASSIFY_TYPE:
6204 return expand_builtin_classify_type (exp);
6206 case BUILT_IN_CONSTANT_P:
6207 return const0_rtx;
6209 case BUILT_IN_FRAME_ADDRESS:
6210 case BUILT_IN_RETURN_ADDRESS:
6211 return expand_builtin_frame_address (fndecl, exp);
6213 /* Returns the address of the area where the structure is returned.
6214 0 otherwise. */
6215 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6216 if (call_expr_nargs (exp) != 0
6217 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6218 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6219 return const0_rtx;
6220 else
6221 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6223 case BUILT_IN_ALLOCA:
6224 case BUILT_IN_ALLOCA_WITH_ALIGN:
6225 /* If the allocation stems from the declaration of a variable-sized
6226 object, it cannot accumulate. */
6227 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6228 if (target)
6229 return target;
6230 break;
6232 case BUILT_IN_STACK_SAVE:
6233 return expand_stack_save ();
6235 case BUILT_IN_STACK_RESTORE:
6236 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6237 return const0_rtx;
6239 case BUILT_IN_BSWAP16:
6240 case BUILT_IN_BSWAP32:
6241 case BUILT_IN_BSWAP64:
6242 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6243 if (target)
6244 return target;
6245 break;
6247 CASE_INT_FN (BUILT_IN_FFS):
6248 target = expand_builtin_unop (target_mode, exp, target,
6249 subtarget, ffs_optab);
6250 if (target)
6251 return target;
6252 break;
6254 CASE_INT_FN (BUILT_IN_CLZ):
6255 target = expand_builtin_unop (target_mode, exp, target,
6256 subtarget, clz_optab);
6257 if (target)
6258 return target;
6259 break;
6261 CASE_INT_FN (BUILT_IN_CTZ):
6262 target = expand_builtin_unop (target_mode, exp, target,
6263 subtarget, ctz_optab);
6264 if (target)
6265 return target;
6266 break;
6268 CASE_INT_FN (BUILT_IN_CLRSB):
6269 target = expand_builtin_unop (target_mode, exp, target,
6270 subtarget, clrsb_optab);
6271 if (target)
6272 return target;
6273 break;
6275 CASE_INT_FN (BUILT_IN_POPCOUNT):
6276 target = expand_builtin_unop (target_mode, exp, target,
6277 subtarget, popcount_optab);
6278 if (target)
6279 return target;
6280 break;
6282 CASE_INT_FN (BUILT_IN_PARITY):
6283 target = expand_builtin_unop (target_mode, exp, target,
6284 subtarget, parity_optab);
6285 if (target)
6286 return target;
6287 break;
6289 case BUILT_IN_STRLEN:
6290 target = expand_builtin_strlen (exp, target, target_mode);
6291 if (target)
6292 return target;
6293 break;
6295 case BUILT_IN_STRCPY:
6296 target = expand_builtin_strcpy (exp, target);
6297 if (target)
6298 return target;
6299 break;
6301 case BUILT_IN_STRNCPY:
6302 target = expand_builtin_strncpy (exp, target);
6303 if (target)
6304 return target;
6305 break;
6307 case BUILT_IN_STPCPY:
6308 target = expand_builtin_stpcpy (exp, target, mode);
6309 if (target)
6310 return target;
6311 break;
6313 case BUILT_IN_MEMCPY:
6314 target = expand_builtin_memcpy (exp, target);
6315 if (target)
6316 return target;
6317 break;
6319 case BUILT_IN_MEMPCPY:
6320 target = expand_builtin_mempcpy (exp, target, mode);
6321 if (target)
6322 return target;
6323 break;
6325 case BUILT_IN_MEMSET:
6326 target = expand_builtin_memset (exp, target, mode);
6327 if (target)
6328 return target;
6329 break;
6331 case BUILT_IN_BZERO:
6332 target = expand_builtin_bzero (exp);
6333 if (target)
6334 return target;
6335 break;
6337 case BUILT_IN_STRCMP:
6338 target = expand_builtin_strcmp (exp, target);
6339 if (target)
6340 return target;
6341 break;
6343 case BUILT_IN_STRNCMP:
6344 target = expand_builtin_strncmp (exp, target, mode);
6345 if (target)
6346 return target;
6347 break;
6349 case BUILT_IN_BCMP:
6350 case BUILT_IN_MEMCMP:
6351 target = expand_builtin_memcmp (exp, target, mode);
6352 if (target)
6353 return target;
6354 break;
6356 case BUILT_IN_SETJMP:
6357 /* This should have been lowered to the builtins below. */
6358 gcc_unreachable ();
6360 case BUILT_IN_SETJMP_SETUP:
6361 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6362 and the receiver label. */
6363 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6365 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6366 VOIDmode, EXPAND_NORMAL);
6367 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6368 rtx_insn *label_r = label_rtx (label);
6370 /* This is copied from the handling of non-local gotos. */
6371 expand_builtin_setjmp_setup (buf_addr, label_r);
6372 nonlocal_goto_handler_labels
6373 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6374 nonlocal_goto_handler_labels);
6375 /* ??? Do not let expand_label treat us as such since we would
6376 not want to be both on the list of non-local labels and on
6377 the list of forced labels. */
6378 FORCED_LABEL (label) = 0;
6379 return const0_rtx;
6381 break;
6383 case BUILT_IN_SETJMP_RECEIVER:
6384 /* __builtin_setjmp_receiver is passed the receiver label. */
6385 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6387 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6388 rtx_insn *label_r = label_rtx (label);
6390 expand_builtin_setjmp_receiver (label_r);
6391 return const0_rtx;
6393 break;
6395 /* __builtin_longjmp is passed a pointer to an array of five words.
6396 It's similar to the C library longjmp function but works with
6397 __builtin_setjmp above. */
6398 case BUILT_IN_LONGJMP:
6399 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6401 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6402 VOIDmode, EXPAND_NORMAL);
6403 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6405 if (value != const1_rtx)
6407 error ("%<__builtin_longjmp%> second argument must be 1");
6408 return const0_rtx;
6411 expand_builtin_longjmp (buf_addr, value);
6412 return const0_rtx;
6414 break;
6416 case BUILT_IN_NONLOCAL_GOTO:
6417 target = expand_builtin_nonlocal_goto (exp);
6418 if (target)
6419 return target;
6420 break;
6422 /* This updates the setjmp buffer that is its argument with the value
6423 of the current stack pointer. */
6424 case BUILT_IN_UPDATE_SETJMP_BUF:
6425 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6427 rtx buf_addr
6428 = expand_normal (CALL_EXPR_ARG (exp, 0));
6430 expand_builtin_update_setjmp_buf (buf_addr);
6431 return const0_rtx;
6433 break;
6435 case BUILT_IN_TRAP:
6436 expand_builtin_trap ();
6437 return const0_rtx;
6439 case BUILT_IN_UNREACHABLE:
6440 expand_builtin_unreachable ();
6441 return const0_rtx;
6443 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6444 case BUILT_IN_SIGNBITD32:
6445 case BUILT_IN_SIGNBITD64:
6446 case BUILT_IN_SIGNBITD128:
6447 target = expand_builtin_signbit (exp, target);
6448 if (target)
6449 return target;
6450 break;
6452 /* Various hooks for the DWARF 2 __throw routine. */
6453 case BUILT_IN_UNWIND_INIT:
6454 expand_builtin_unwind_init ();
6455 return const0_rtx;
6456 case BUILT_IN_DWARF_CFA:
6457 return virtual_cfa_rtx;
6458 #ifdef DWARF2_UNWIND_INFO
6459 case BUILT_IN_DWARF_SP_COLUMN:
6460 return expand_builtin_dwarf_sp_column ();
6461 case BUILT_IN_INIT_DWARF_REG_SIZES:
6462 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6463 return const0_rtx;
6464 #endif
6465 case BUILT_IN_FROB_RETURN_ADDR:
6466 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6467 case BUILT_IN_EXTRACT_RETURN_ADDR:
6468 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6469 case BUILT_IN_EH_RETURN:
6470 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6471 CALL_EXPR_ARG (exp, 1));
6472 return const0_rtx;
6473 case BUILT_IN_EH_RETURN_DATA_REGNO:
6474 return expand_builtin_eh_return_data_regno (exp);
6475 case BUILT_IN_EXTEND_POINTER:
6476 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6477 case BUILT_IN_EH_POINTER:
6478 return expand_builtin_eh_pointer (exp);
6479 case BUILT_IN_EH_FILTER:
6480 return expand_builtin_eh_filter (exp);
6481 case BUILT_IN_EH_COPY_VALUES:
6482 return expand_builtin_eh_copy_values (exp);
6484 case BUILT_IN_VA_START:
6485 return expand_builtin_va_start (exp);
6486 case BUILT_IN_VA_END:
6487 return expand_builtin_va_end (exp);
6488 case BUILT_IN_VA_COPY:
6489 return expand_builtin_va_copy (exp);
6490 case BUILT_IN_EXPECT:
6491 return expand_builtin_expect (exp, target);
6492 case BUILT_IN_ASSUME_ALIGNED:
6493 return expand_builtin_assume_aligned (exp, target);
6494 case BUILT_IN_PREFETCH:
6495 expand_builtin_prefetch (exp);
6496 return const0_rtx;
6498 case BUILT_IN_INIT_TRAMPOLINE:
6499 return expand_builtin_init_trampoline (exp, true);
6500 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6501 return expand_builtin_init_trampoline (exp, false);
6502 case BUILT_IN_ADJUST_TRAMPOLINE:
6503 return expand_builtin_adjust_trampoline (exp);
6505 case BUILT_IN_FORK:
6506 case BUILT_IN_EXECL:
6507 case BUILT_IN_EXECV:
6508 case BUILT_IN_EXECLP:
6509 case BUILT_IN_EXECLE:
6510 case BUILT_IN_EXECVP:
6511 case BUILT_IN_EXECVE:
6512 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6513 if (target)
6514 return target;
6515 break;
6517 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6518 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6519 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6520 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6521 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6522 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6523 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6524 if (target)
6525 return target;
6526 break;
6528 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6529 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6530 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6531 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6532 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6533 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6534 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6535 if (target)
6536 return target;
6537 break;
6539 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6540 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6541 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6542 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6543 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6544 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6545 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6546 if (target)
6547 return target;
6548 break;
6550 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6551 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6552 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6553 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6554 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6555 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6556 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6557 if (target)
6558 return target;
6559 break;
6561 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6562 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6563 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6564 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6565 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6566 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6567 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6568 if (target)
6569 return target;
6570 break;
6572 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6573 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6574 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6575 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6576 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6577 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6578 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6579 if (target)
6580 return target;
6581 break;
6583 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6584 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6585 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6586 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6587 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6588 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6589 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6590 if (target)
6591 return target;
6592 break;
6594 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6595 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6596 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6597 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6598 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6599 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6600 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6601 if (target)
6602 return target;
6603 break;
6605 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6606 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6607 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6608 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6609 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6610 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6611 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6612 if (target)
6613 return target;
6614 break;
6616 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6617 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6618 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6619 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6620 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6621 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6622 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6623 if (target)
6624 return target;
6625 break;
6627 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6628 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6629 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6630 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6631 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6632 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6633 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6634 if (target)
6635 return target;
6636 break;
6638 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6639 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6640 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6641 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6642 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6643 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6644 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6645 if (target)
6646 return target;
6647 break;
6649 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6650 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6651 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6652 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6653 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6654 if (mode == VOIDmode)
6655 mode = TYPE_MODE (boolean_type_node);
6656 if (!target || !register_operand (target, mode))
6657 target = gen_reg_rtx (mode);
6659 mode = get_builtin_sync_mode
6660 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6661 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6662 if (target)
6663 return target;
6664 break;
6666 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6667 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6668 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6669 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6670 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6671 mode = get_builtin_sync_mode
6672 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6673 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6674 if (target)
6675 return target;
6676 break;
6678 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6679 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6680 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6681 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6682 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6683 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6684 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6685 if (target)
6686 return target;
6687 break;
6689 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6690 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6691 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6692 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6693 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6694 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6695 expand_builtin_sync_lock_release (mode, exp);
6696 return const0_rtx;
6698 case BUILT_IN_SYNC_SYNCHRONIZE:
6699 expand_builtin_sync_synchronize ();
6700 return const0_rtx;
6702 case BUILT_IN_ATOMIC_EXCHANGE_1:
6703 case BUILT_IN_ATOMIC_EXCHANGE_2:
6704 case BUILT_IN_ATOMIC_EXCHANGE_4:
6705 case BUILT_IN_ATOMIC_EXCHANGE_8:
6706 case BUILT_IN_ATOMIC_EXCHANGE_16:
6707 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6708 target = expand_builtin_atomic_exchange (mode, exp, target);
6709 if (target)
6710 return target;
6711 break;
6713 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6714 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6715 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6716 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6717 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6719 unsigned int nargs, z;
6720 vec<tree, va_gc> *vec;
6722 mode =
6723 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6724 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6725 if (target)
6726 return target;
6728 /* If this is turned into an external library call, the weak parameter
6729 must be dropped to match the expected parameter list. */
6730 nargs = call_expr_nargs (exp);
6731 vec_alloc (vec, nargs - 1);
6732 for (z = 0; z < 3; z++)
6733 vec->quick_push (CALL_EXPR_ARG (exp, z));
6734 /* Skip the boolean weak parameter. */
6735 for (z = 4; z < 6; z++)
6736 vec->quick_push (CALL_EXPR_ARG (exp, z));
6737 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6738 break;
6741 case BUILT_IN_ATOMIC_LOAD_1:
6742 case BUILT_IN_ATOMIC_LOAD_2:
6743 case BUILT_IN_ATOMIC_LOAD_4:
6744 case BUILT_IN_ATOMIC_LOAD_8:
6745 case BUILT_IN_ATOMIC_LOAD_16:
6746 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6747 target = expand_builtin_atomic_load (mode, exp, target);
6748 if (target)
6749 return target;
6750 break;
6752 case BUILT_IN_ATOMIC_STORE_1:
6753 case BUILT_IN_ATOMIC_STORE_2:
6754 case BUILT_IN_ATOMIC_STORE_4:
6755 case BUILT_IN_ATOMIC_STORE_8:
6756 case BUILT_IN_ATOMIC_STORE_16:
6757 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6758 target = expand_builtin_atomic_store (mode, exp);
6759 if (target)
6760 return const0_rtx;
6761 break;
6763 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6764 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6765 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6766 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6767 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6769 enum built_in_function lib;
6770 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6771 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6772 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6773 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6774 ignore, lib);
6775 if (target)
6776 return target;
6777 break;
6779 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6780 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6781 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6782 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6783 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6785 enum built_in_function lib;
6786 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6787 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6788 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6789 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6790 ignore, lib);
6791 if (target)
6792 return target;
6793 break;
6795 case BUILT_IN_ATOMIC_AND_FETCH_1:
6796 case BUILT_IN_ATOMIC_AND_FETCH_2:
6797 case BUILT_IN_ATOMIC_AND_FETCH_4:
6798 case BUILT_IN_ATOMIC_AND_FETCH_8:
6799 case BUILT_IN_ATOMIC_AND_FETCH_16:
6801 enum built_in_function lib;
6802 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6803 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6804 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6805 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6806 ignore, lib);
6807 if (target)
6808 return target;
6809 break;
6811 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6812 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6813 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6814 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6815 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6817 enum built_in_function lib;
6818 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6819 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6820 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6821 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6822 ignore, lib);
6823 if (target)
6824 return target;
6825 break;
6827 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6828 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6829 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6830 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6831 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6833 enum built_in_function lib;
6834 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6835 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6836 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6837 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6838 ignore, lib);
6839 if (target)
6840 return target;
6841 break;
6843 case BUILT_IN_ATOMIC_OR_FETCH_1:
6844 case BUILT_IN_ATOMIC_OR_FETCH_2:
6845 case BUILT_IN_ATOMIC_OR_FETCH_4:
6846 case BUILT_IN_ATOMIC_OR_FETCH_8:
6847 case BUILT_IN_ATOMIC_OR_FETCH_16:
6849 enum built_in_function lib;
6850 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6851 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6852 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6853 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6854 ignore, lib);
6855 if (target)
6856 return target;
6857 break;
6859 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6860 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6861 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6862 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6863 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6864 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6865 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6866 ignore, BUILT_IN_NONE);
6867 if (target)
6868 return target;
6869 break;
6871 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6872 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6873 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6874 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6875 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6876 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6877 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6878 ignore, BUILT_IN_NONE);
6879 if (target)
6880 return target;
6881 break;
6883 case BUILT_IN_ATOMIC_FETCH_AND_1:
6884 case BUILT_IN_ATOMIC_FETCH_AND_2:
6885 case BUILT_IN_ATOMIC_FETCH_AND_4:
6886 case BUILT_IN_ATOMIC_FETCH_AND_8:
6887 case BUILT_IN_ATOMIC_FETCH_AND_16:
6888 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6889 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6890 ignore, BUILT_IN_NONE);
6891 if (target)
6892 return target;
6893 break;
6895 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6896 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6897 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6898 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6899 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6900 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6901 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6902 ignore, BUILT_IN_NONE);
6903 if (target)
6904 return target;
6905 break;
6907 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6908 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6909 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6910 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6911 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6912 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6913 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6914 ignore, BUILT_IN_NONE);
6915 if (target)
6916 return target;
6917 break;
6919 case BUILT_IN_ATOMIC_FETCH_OR_1:
6920 case BUILT_IN_ATOMIC_FETCH_OR_2:
6921 case BUILT_IN_ATOMIC_FETCH_OR_4:
6922 case BUILT_IN_ATOMIC_FETCH_OR_8:
6923 case BUILT_IN_ATOMIC_FETCH_OR_16:
6924 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6925 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6926 ignore, BUILT_IN_NONE);
6927 if (target)
6928 return target;
6929 break;
6931 case BUILT_IN_ATOMIC_TEST_AND_SET:
6932 return expand_builtin_atomic_test_and_set (exp, target);
6934 case BUILT_IN_ATOMIC_CLEAR:
6935 return expand_builtin_atomic_clear (exp);
6937 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6938 return expand_builtin_atomic_always_lock_free (exp);
6940 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6941 target = expand_builtin_atomic_is_lock_free (exp);
6942 if (target)
6943 return target;
6944 break;
6946 case BUILT_IN_ATOMIC_THREAD_FENCE:
6947 expand_builtin_atomic_thread_fence (exp);
6948 return const0_rtx;
6950 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6951 expand_builtin_atomic_signal_fence (exp);
6952 return const0_rtx;
6954 case BUILT_IN_OBJECT_SIZE:
6955 return expand_builtin_object_size (exp);
6957 case BUILT_IN_MEMCPY_CHK:
6958 case BUILT_IN_MEMPCPY_CHK:
6959 case BUILT_IN_MEMMOVE_CHK:
6960 case BUILT_IN_MEMSET_CHK:
6961 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6962 if (target)
6963 return target;
6964 break;
6966 case BUILT_IN_STRCPY_CHK:
6967 case BUILT_IN_STPCPY_CHK:
6968 case BUILT_IN_STRNCPY_CHK:
6969 case BUILT_IN_STPNCPY_CHK:
6970 case BUILT_IN_STRCAT_CHK:
6971 case BUILT_IN_STRNCAT_CHK:
6972 case BUILT_IN_SNPRINTF_CHK:
6973 case BUILT_IN_VSNPRINTF_CHK:
6974 maybe_emit_chk_warning (exp, fcode);
6975 break;
6977 case BUILT_IN_SPRINTF_CHK:
6978 case BUILT_IN_VSPRINTF_CHK:
6979 maybe_emit_sprintf_chk_warning (exp, fcode);
6980 break;
6982 case BUILT_IN_FREE:
6983 if (warn_free_nonheap_object)
6984 maybe_emit_free_warning (exp);
6985 break;
6987 case BUILT_IN_THREAD_POINTER:
6988 return expand_builtin_thread_pointer (exp, target);
6990 case BUILT_IN_SET_THREAD_POINTER:
6991 expand_builtin_set_thread_pointer (exp);
6992 return const0_rtx;
6994 case BUILT_IN_CILK_DETACH:
6995 expand_builtin_cilk_detach (exp);
6996 return const0_rtx;
6998 case BUILT_IN_CILK_POP_FRAME:
6999 expand_builtin_cilk_pop_frame (exp);
7000 return const0_rtx;
7002 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7003 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7004 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7005 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7006 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7007 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7008 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7009 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7010 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7011 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7012 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7013 /* We allow user CHKP builtins if Pointer Bounds
7014 Checker is off. */
7015 if (!chkp_function_instrumented_p (current_function_decl))
7017 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7018 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7019 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7020 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7021 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7022 return expand_normal (CALL_EXPR_ARG (exp, 0));
7023 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7024 return expand_normal (size_zero_node);
7025 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7026 return expand_normal (size_int (-1));
7027 else
7028 return const0_rtx;
7030 /* FALLTHROUGH */
7032 case BUILT_IN_CHKP_BNDMK:
7033 case BUILT_IN_CHKP_BNDSTX:
7034 case BUILT_IN_CHKP_BNDCL:
7035 case BUILT_IN_CHKP_BNDCU:
7036 case BUILT_IN_CHKP_BNDLDX:
7037 case BUILT_IN_CHKP_BNDRET:
7038 case BUILT_IN_CHKP_INTERSECT:
7039 case BUILT_IN_CHKP_NARROW:
7040 case BUILT_IN_CHKP_EXTRACT_LOWER:
7041 case BUILT_IN_CHKP_EXTRACT_UPPER:
7042 /* Software implementation of Pointer Bounds Checker is NYI.
7043 Target support is required. */
7044 error ("Your target platform does not support -fcheck-pointer-bounds");
7045 break;
7047 case BUILT_IN_ACC_ON_DEVICE:
7048 target = expand_builtin_acc_on_device (exp, target);
7049 if (target)
7050 return target;
7051 break;
7053 default: /* just do library call, if unknown builtin */
7054 break;
7057 /* The switch statement above can drop through to cause the function
7058 to be called normally. */
7059 return expand_call (exp, target, ignore);
7062 /* Similar to expand_builtin but is used for instrumented calls. */
7064 rtx
7065 expand_builtin_with_bounds (tree exp, rtx target,
7066 rtx subtarget ATTRIBUTE_UNUSED,
7067 machine_mode mode, int ignore)
7069 tree fndecl = get_callee_fndecl (exp);
7070 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7072 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7074 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7075 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7077 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7078 && fcode < END_CHKP_BUILTINS);
7080 switch (fcode)
7082 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7083 target = expand_builtin_memcpy_with_bounds (exp, target);
7084 if (target)
7085 return target;
7086 break;
7088 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7089 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7090 if (target)
7091 return target;
7092 break;
7094 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7095 target = expand_builtin_memset_with_bounds (exp, target, mode);
7096 if (target)
7097 return target;
7098 break;
7100 default:
7101 break;
7104 /* The switch statement above can drop through to cause the function
7105 to be called normally. */
7106 return expand_call (exp, target, ignore);
7109 /* Determine whether a tree node represents a call to a built-in
7110 function. If the tree T is a call to a built-in function with
7111 the right number of arguments of the appropriate types, return
7112 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7113 Otherwise the return value is END_BUILTINS. */
7115 enum built_in_function
7116 builtin_mathfn_code (const_tree t)
7118 const_tree fndecl, arg, parmlist;
7119 const_tree argtype, parmtype;
7120 const_call_expr_arg_iterator iter;
7122 if (TREE_CODE (t) != CALL_EXPR
7123 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7124 return END_BUILTINS;
7126 fndecl = get_callee_fndecl (t);
7127 if (fndecl == NULL_TREE
7128 || TREE_CODE (fndecl) != FUNCTION_DECL
7129 || ! DECL_BUILT_IN (fndecl)
7130 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7131 return END_BUILTINS;
7133 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7134 init_const_call_expr_arg_iterator (t, &iter);
7135 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7137 /* If a function doesn't take a variable number of arguments,
7138 the last element in the list will have type `void'. */
7139 parmtype = TREE_VALUE (parmlist);
7140 if (VOID_TYPE_P (parmtype))
7142 if (more_const_call_expr_args_p (&iter))
7143 return END_BUILTINS;
7144 return DECL_FUNCTION_CODE (fndecl);
7147 if (! more_const_call_expr_args_p (&iter))
7148 return END_BUILTINS;
7150 arg = next_const_call_expr_arg (&iter);
7151 argtype = TREE_TYPE (arg);
7153 if (SCALAR_FLOAT_TYPE_P (parmtype))
7155 if (! SCALAR_FLOAT_TYPE_P (argtype))
7156 return END_BUILTINS;
7158 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7160 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7161 return END_BUILTINS;
7163 else if (POINTER_TYPE_P (parmtype))
7165 if (! POINTER_TYPE_P (argtype))
7166 return END_BUILTINS;
7168 else if (INTEGRAL_TYPE_P (parmtype))
7170 if (! INTEGRAL_TYPE_P (argtype))
7171 return END_BUILTINS;
7173 else
7174 return END_BUILTINS;
7177 /* Variable-length argument list. */
7178 return DECL_FUNCTION_CODE (fndecl);
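/* Example: for the call sqrt (x) with a double argument, the walk
   above matches a REAL_TYPE parameter against a REAL_TYPE argument
   and returns BUILT_IN_SQRT; an argument of the wrong class (say, a
   pointer where a float is expected) fails the SCALAR_FLOAT_TYPE_P
   check and the result is END_BUILTINS. */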
7181 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7182 evaluate to a constant. */
7184 static tree
7185 fold_builtin_constant_p (tree arg)
7187 /* We return 1 for a numeric type that's known to be a constant
7188 value at compile-time or for an aggregate type that's a
7189 literal constant. */
7190 STRIP_NOPS (arg);
7192 /* If we know this is a constant, return the constant one. */
7193 if (CONSTANT_CLASS_P (arg)
7194 || (TREE_CODE (arg) == CONSTRUCTOR
7195 && TREE_CONSTANT (arg)))
7196 return integer_one_node;
7197 if (TREE_CODE (arg) == ADDR_EXPR)
7199 tree op = TREE_OPERAND (arg, 0);
7200 if (TREE_CODE (op) == STRING_CST
7201 || (TREE_CODE (op) == ARRAY_REF
7202 && integer_zerop (TREE_OPERAND (op, 1))
7203 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7204 return integer_one_node;
7207 /* If this expression has side effects, show we don't know it to be a
7208 constant. Likewise if it's a pointer or aggregate type since in
7209 those cases we only want literals, since those are only optimized
7210 when generating RTL, not later.
7211 And finally, if we are compiling an initializer, not code, we
7212 need to return a definite result now; there's not going to be any
7213 more optimization done. */
7214 if (TREE_SIDE_EFFECTS (arg)
7215 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7216 || POINTER_TYPE_P (TREE_TYPE (arg))
7217 || cfun == 0
7218 || folding_initializer
7219 || force_folding_builtin_constant_p)
7220 return integer_zero_node;
7222 return NULL_TREE;
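/* Examples: __builtin_constant_p (42) folds to 1 (CONSTANT_CLASS_P),
   __builtin_constant_p ("abc") folds to 1 (address of a STRING_CST),
   and __builtin_constant_p (p) for a pointer variable folds to 0
   immediately, since for pointer and aggregate types only literals
   are accepted. */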
7225 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7226 return it as a truthvalue. */
7228 static tree
7229 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7230 tree predictor)
7232 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7234 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7235 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7236 ret_type = TREE_TYPE (TREE_TYPE (fn));
7237 pred_type = TREE_VALUE (arg_types);
7238 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7240 pred = fold_convert_loc (loc, pred_type, pred);
7241 expected = fold_convert_loc (loc, expected_type, expected);
7242 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7243 predictor);
7245 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7246 build_int_cst (ret_type, 0));
7249 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7250 NULL_TREE if no simplification is possible. */
7252 tree
7253 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7255 tree inner, fndecl, inner_arg0;
7256 enum tree_code code;
7258 /* Distribute the expected value over short-circuiting operators.
7259 See through the cast from truthvalue_type_node to long. */
7260 inner_arg0 = arg0;
7261 while (CONVERT_EXPR_P (inner_arg0)
7262 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7263 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7264 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7266 /* If this is a builtin_expect within a builtin_expect, keep the
7267 inner one. See through a comparison against a constant. It
7268 might have been added to create a truthvalue. */
7269 inner = inner_arg0;
7271 if (COMPARISON_CLASS_P (inner)
7272 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7273 inner = TREE_OPERAND (inner, 0);
7275 if (TREE_CODE (inner) == CALL_EXPR
7276 && (fndecl = get_callee_fndecl (inner))
7277 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7278 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7279 return arg0;
7281 inner = inner_arg0;
7282 code = TREE_CODE (inner);
7283 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7285 tree op0 = TREE_OPERAND (inner, 0);
7286 tree op1 = TREE_OPERAND (inner, 1);
7288 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7289 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7290 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7292 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7295 /* If the argument isn't invariant then there's nothing else we can do. */
7296 if (!TREE_CONSTANT (inner_arg0))
7297 return NULL_TREE;
7299 /* If we expect that a comparison against the argument will fold to
7300 a constant, return the constant. In practice, this means a true
7301 constant or the address of a non-weak symbol. */
7302 inner = inner_arg0;
7303 STRIP_NOPS (inner);
7304 if (TREE_CODE (inner) == ADDR_EXPR)
7306 do
7308 inner = TREE_OPERAND (inner, 0);
7310 while (TREE_CODE (inner) == COMPONENT_REF
7311 || TREE_CODE (inner) == ARRAY_REF);
7312 if ((TREE_CODE (inner) == VAR_DECL
7313 || TREE_CODE (inner) == FUNCTION_DECL)
7314 && DECL_WEAK (inner))
7315 return NULL_TREE;
7318 /* Otherwise, ARG0 already has the proper type for the return value. */
7319 return arg0;
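/* Example of the distribution above:
     __builtin_expect (a && b, 1)
   becomes
     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)
   converted back to the type of the original argument, so both halves
   of the short-circuit carry the prediction. */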
7322 /* Fold a call to __builtin_classify_type with argument ARG. */
7324 static tree
7325 fold_builtin_classify_type (tree arg)
7327 if (arg == 0)
7328 return build_int_cst (integer_type_node, no_type_class);
7330 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7333 /* Fold a call to __builtin_strlen with argument ARG. */
7335 static tree
7336 fold_builtin_strlen (location_t loc, tree type, tree arg)
7338 if (!validate_arg (arg, POINTER_TYPE))
7339 return NULL_TREE;
7340 else
7342 tree len = c_strlen (arg, 0);
7344 if (len)
7345 return fold_convert_loc (loc, type, len);
7347 return NULL_TREE;
7351 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7353 static tree
7354 fold_builtin_inf (location_t loc, tree type, int warn)
7356 REAL_VALUE_TYPE real;
7358 /* __builtin_inff is intended to be usable to define INFINITY on all
7359 targets. If an infinity is not available, INFINITY expands "to a
7360 positive constant of type float that overflows at translation
7361 time", footnote "In this case, using INFINITY will violate the
7362 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7363 Thus we pedwarn to ensure this constraint violation is
7364 diagnosed. */
7365 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7366 pedwarn (loc, 0, "target format does not support infinity");
7368 real_inf (&real);
7369 return build_real (type, real);
7372 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7374 static tree
7375 fold_builtin_nan (tree arg, tree type, int quiet)
7377 REAL_VALUE_TYPE real;
7378 const char *str;
7380 if (!validate_arg (arg, POINTER_TYPE))
7381 return NULL_TREE;
7382 str = c_getstr (arg);
7383 if (!str)
7384 return NULL_TREE;
7386 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7387 return NULL_TREE;
7389 return build_real (type, real);
7392 /* Return true if the floating point expression T has an integer value.
7393 We also allow +Inf, -Inf and NaN to be considered integer values. */
7395 static bool
7396 integer_valued_real_p (tree t)
7398 switch (TREE_CODE (t))
7400 case FLOAT_EXPR:
7401 return true;
7403 case ABS_EXPR:
7404 case SAVE_EXPR:
7405 return integer_valued_real_p (TREE_OPERAND (t, 0));
7407 case COMPOUND_EXPR:
7408 case MODIFY_EXPR:
7409 case BIND_EXPR:
7410 return integer_valued_real_p (TREE_OPERAND (t, 1));
7412 case PLUS_EXPR:
7413 case MINUS_EXPR:
7414 case MULT_EXPR:
7415 case MIN_EXPR:
7416 case MAX_EXPR:
7417 return integer_valued_real_p (TREE_OPERAND (t, 0))
7418 && integer_valued_real_p (TREE_OPERAND (t, 1));
7420 case COND_EXPR:
7421 return integer_valued_real_p (TREE_OPERAND (t, 1))
7422 && integer_valued_real_p (TREE_OPERAND (t, 2));
7424 case REAL_CST:
7425 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7427 CASE_CONVERT:
7429 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7430 if (TREE_CODE (type) == INTEGER_TYPE)
7431 return true;
7432 if (TREE_CODE (type) == REAL_TYPE)
7433 return integer_valued_real_p (TREE_OPERAND (t, 0));
7434 break;
7437 case CALL_EXPR:
7438 switch (builtin_mathfn_code (t))
7440 CASE_FLT_FN (BUILT_IN_CEIL):
7441 CASE_FLT_FN (BUILT_IN_FLOOR):
7442 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7443 CASE_FLT_FN (BUILT_IN_RINT):
7444 CASE_FLT_FN (BUILT_IN_ROUND):
7445 CASE_FLT_FN (BUILT_IN_TRUNC):
7446 return true;
7448 CASE_FLT_FN (BUILT_IN_FMIN):
7449 CASE_FLT_FN (BUILT_IN_FMAX):
7450 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7451 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7453 default:
7454 break;
7456 break;
7458 default:
7459 break;
7461 return false;
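/* Example: for t = floor (x) + trunc (y) the PLUS_EXPR case recurses
   into both operands; each is a call whose builtin_mathfn_code is a
   rounding function, so the whole expression is integer valued. */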
7464 /* FNDECL is assumed to be a builtin where truncation can be propagated
7465 across (for instance floor((double)f) == (double)floorf (f)).
7466 Do the transformation for a call with argument ARG. */
7468 static tree
7469 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7471 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7473 if (!validate_arg (arg, REAL_TYPE))
7474 return NULL_TREE;
7476 /* Integer rounding functions are idempotent. */
7477 if (fcode == builtin_mathfn_code (arg))
7478 return arg;
7480 /* If argument is already integer valued, and we don't need to worry
7481 about setting errno, there's no need to perform rounding. */
7482 if (! flag_errno_math && integer_valued_real_p (arg))
7483 return arg;
7485 if (optimize)
7487 tree arg0 = strip_float_extensions (arg);
7488 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7489 tree newtype = TREE_TYPE (arg0);
7490 tree decl;
7492 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7493 && (decl = mathfn_built_in (newtype, fcode)))
7494 return fold_convert_loc (loc, ftype,
7495 build_call_expr_loc (loc, decl, 1,
7496 fold_convert_loc (loc,
7497 newtype,
7498 arg0)));
7500 return NULL_TREE;
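/* Examples: floor (floor (x)) folds to floor (x) by idempotence, and
   with float f, floor ((double) f) is narrowed to
   (double) floorf (f). */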
7503 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7504 the argument, for instance lround((double)f) -> lroundf (f).
7505 Do the transformation for a call with argument ARG. */
7507 static tree
7508 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7510 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7512 if (!validate_arg (arg, REAL_TYPE))
7513 return NULL_TREE;
7515 /* If argument is already integer valued, and we don't need to worry
7516 about setting errno, there's no need to perform rounding. */
7517 if (! flag_errno_math && integer_valued_real_p (arg))
7518 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7519 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7521 if (optimize)
7523 tree ftype = TREE_TYPE (arg);
7524 tree arg0 = strip_float_extensions (arg);
7525 tree newtype = TREE_TYPE (arg0);
7526 tree decl;
7528 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7529 && (decl = mathfn_built_in (newtype, fcode)))
7530 return build_call_expr_loc (loc, decl, 1,
7531 fold_convert_loc (loc, newtype, arg0));
7534 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7535 sizeof (int) == sizeof (long). */
7536 if (TYPE_PRECISION (integer_type_node)
7537 == TYPE_PRECISION (long_integer_type_node))
7539 tree newfn = NULL_TREE;
7540 switch (fcode)
7542 CASE_FLT_FN (BUILT_IN_ICEIL):
7543 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7544 break;
7546 CASE_FLT_FN (BUILT_IN_IFLOOR):
7547 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7548 break;
7550 CASE_FLT_FN (BUILT_IN_IROUND):
7551 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7552 break;
7554 CASE_FLT_FN (BUILT_IN_IRINT):
7555 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7556 break;
7558 default:
7559 break;
7562 if (newfn)
7564 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7565 return fold_convert_loc (loc,
7566 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7570 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7571 sizeof (long long) == sizeof (long). */
7572 if (TYPE_PRECISION (long_long_integer_type_node)
7573 == TYPE_PRECISION (long_integer_type_node))
7575 tree newfn = NULL_TREE;
7576 switch (fcode)
7578 CASE_FLT_FN (BUILT_IN_LLCEIL):
7579 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7580 break;
7582 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7583 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7584 break;
7586 CASE_FLT_FN (BUILT_IN_LLROUND):
7587 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7588 break;
7590 CASE_FLT_FN (BUILT_IN_LLRINT):
7591 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7592 break;
7594 default:
7595 break;
7598 if (newfn)
7600 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7601 return fold_convert_loc (loc,
7602 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7606 return NULL_TREE;
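/* Examples: with float f, lround ((double) f) narrows to lroundf (f);
   where int and long have the same precision iround (x) becomes
   lround (x), and where long long and long have the same precision
   llround (x) becomes lround (x). */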
7609 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7610 return type. Return NULL_TREE if no simplification can be made. */
7612 static tree
7613 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7615 tree res;
7617 if (!validate_arg (arg, COMPLEX_TYPE)
7618 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7619 return NULL_TREE;
7621 /* Calculate the result when the argument is a constant. */
7622 if (TREE_CODE (arg) == COMPLEX_CST
7623 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7624 type, mpfr_hypot)))
7625 return res;
7627 if (TREE_CODE (arg) == COMPLEX_EXPR)
7629 tree real = TREE_OPERAND (arg, 0);
7630 tree imag = TREE_OPERAND (arg, 1);
7632 /* If either part is zero, cabs is fabs of the other. */
7633 if (real_zerop (real))
7634 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7635 if (real_zerop (imag))
7636 return fold_build1_loc (loc, ABS_EXPR, type, real);
7638 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7639 if (flag_unsafe_math_optimizations
7640 && operand_equal_p (real, imag, OEP_PURE_SAME))
7642 const REAL_VALUE_TYPE sqrt2_trunc
7643 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7644 STRIP_NOPS (real);
7645 return fold_build2_loc (loc, MULT_EXPR, type,
7646 fold_build1_loc (loc, ABS_EXPR, type, real),
7647 build_real (type, sqrt2_trunc));
7651 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7652 if (TREE_CODE (arg) == NEGATE_EXPR
7653 || TREE_CODE (arg) == CONJ_EXPR)
7654 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7656 /* Don't do this when optimizing for size. */
7657 if (flag_unsafe_math_optimizations
7658 && optimize && optimize_function_for_speed_p (cfun))
7660 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7662 if (sqrtfn != NULL_TREE)
7664 tree rpart, ipart, result;
7666 arg = builtin_save_expr (arg);
7668 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7669 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7671 rpart = builtin_save_expr (rpart);
7672 ipart = builtin_save_expr (ipart);
7674 result = fold_build2_loc (loc, PLUS_EXPR, type,
7675 fold_build2_loc (loc, MULT_EXPR, type,
7676 rpart, rpart),
7677 fold_build2_loc (loc, MULT_EXPR, type,
7678 ipart, ipart));
7680 return build_call_expr_loc (loc, sqrtfn, 1, result);
7684 return NULL_TREE;
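/* Examples: cabs (3.0 + 4.0i) folds to 5.0 at compile time via
   mpfr_hypot, cabs (-z) and cabs (conj (z)) fold to cabs (z), and
   with -funsafe-math-optimizations (when optimizing for speed)
   cabs (z) expands to sqrt (r*r + i*i). */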
7687 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7688 complex tree type of the result. If NEG is true, the imaginary
7689 zero is negative. */
7691 static tree
7692 build_complex_cproj (tree type, bool neg)
7694 REAL_VALUE_TYPE rinf, rzero = dconst0;
7696 real_inf (&rinf);
7697 rzero.sign = neg;
7698 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7699 build_real (TREE_TYPE (type), rzero));
7702 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7703 return type. Return NULL_TREE if no simplification can be made. */
7705 static tree
7706 fold_builtin_cproj (location_t loc, tree arg, tree type)
7708 if (!validate_arg (arg, COMPLEX_TYPE)
7709 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7710 return NULL_TREE;
7712 /* If there are no infinities, return arg. */
7713 if (! HONOR_INFINITIES (type))
7714 return non_lvalue_loc (loc, arg);
7716 /* Calculate the result when the argument is a constant. */
7717 if (TREE_CODE (arg) == COMPLEX_CST)
7719 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7720 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7722 if (real_isinf (real) || real_isinf (imag))
7723 return build_complex_cproj (type, imag->sign);
7724 else
7725 return arg;
7727 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7729 tree real = TREE_OPERAND (arg, 0);
7730 tree imag = TREE_OPERAND (arg, 1);
7732 STRIP_NOPS (real);
7733 STRIP_NOPS (imag);
7735 /* If the real part is inf and the imag part is known to be
7736 nonnegative, return (inf + 0i). Remember side-effects are
7737 possible in the imag part. */
7738 if (TREE_CODE (real) == REAL_CST
7739 && real_isinf (TREE_REAL_CST_PTR (real))
7740 && tree_expr_nonnegative_p (imag))
7741 return omit_one_operand_loc (loc, type,
7742 build_complex_cproj (type, false),
7743 arg);
7745 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7746 Remember side-effects are possible in the real part. */
7747 if (TREE_CODE (imag) == REAL_CST
7748 && real_isinf (TREE_REAL_CST_PTR (imag)))
7749 return
7750 omit_one_operand_loc (loc, type,
7751 build_complex_cproj (type, TREE_REAL_CST_PTR
7752 (imag)->sign), arg);
7755 return NULL_TREE;
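/* Example: when infinities are honored, cproj (INFINITY + 2.0i) folds
   to the constant INFINITY + 0.0i; a finite constant argument is
   returned unchanged. */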
7758 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7759 Return NULL_TREE if no simplification can be made. */
7761 static tree
7762 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7765 enum built_in_function fcode;
7766 tree res;
7768 if (!validate_arg (arg, REAL_TYPE))
7769 return NULL_TREE;
7771 /* Calculate the result when the argument is a constant. */
7772 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7773 return res;
7775 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7776 fcode = builtin_mathfn_code (arg);
7777 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7779 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7780 arg = fold_build2_loc (loc, MULT_EXPR, type,
7781 CALL_EXPR_ARG (arg, 0),
7782 build_real (type, dconsthalf));
7783 return build_call_expr_loc (loc, expfn, 1, arg);
7786 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7787 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7789 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7791 if (powfn)
7793 tree arg0 = CALL_EXPR_ARG (arg, 0);
7794 tree tree_root;
7795 /* The inner root was either sqrt or cbrt. */
7796 /* This was a conditional expression but it triggered a bug
7797 in Sun C 5.5. */
7798 REAL_VALUE_TYPE dconstroot;
7799 if (BUILTIN_SQRT_P (fcode))
7800 dconstroot = dconsthalf;
7801 else
7802 dconstroot = dconst_third ();
7804 /* Adjust for the outer root. */
7805 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7806 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7807 tree_root = build_real (type, dconstroot);
7808 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7812 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7813 if (flag_unsafe_math_optimizations
7814 && (fcode == BUILT_IN_POW
7815 || fcode == BUILT_IN_POWF
7816 || fcode == BUILT_IN_POWL))
7818 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7819 tree arg0 = CALL_EXPR_ARG (arg, 0);
7820 tree arg1 = CALL_EXPR_ARG (arg, 1);
7821 tree narg1;
7822 if (!tree_expr_nonnegative_p (arg0))
7823 arg0 = build1 (ABS_EXPR, type, arg0);
7824 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7825 build_real (type, dconsthalf));
7826 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7829 return NULL_TREE;
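/* Examples under -funsafe-math-optimizations: sqrt (exp (x)) becomes
   exp (x * 0.5), sqrt (cbrt (x)) becomes pow (x, 1.0/6.0), and
   sqrt (pow (x, y)) becomes pow (|x|, y * 0.5), the fabs being
   dropped when x is known nonnegative. */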
7832 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7833 Return NULL_TREE if no simplification can be made. */
7835 static tree
7836 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7838 const enum built_in_function fcode = builtin_mathfn_code (arg);
7839 tree res;
7841 if (!validate_arg (arg, REAL_TYPE))
7842 return NULL_TREE;
7844 /* Calculate the result when the argument is a constant. */
7845 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7846 return res;
7848 if (flag_unsafe_math_optimizations)
7850 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7851 if (BUILTIN_EXPONENT_P (fcode))
7853 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7854 const REAL_VALUE_TYPE third_trunc =
7855 real_value_truncate (TYPE_MODE (type), dconst_third ());
7856 arg = fold_build2_loc (loc, MULT_EXPR, type,
7857 CALL_EXPR_ARG (arg, 0),
7858 build_real (type, third_trunc));
7859 return build_call_expr_loc (loc, expfn, 1, arg);
7862 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7863 if (BUILTIN_SQRT_P (fcode))
7865 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7867 if (powfn)
7869 tree arg0 = CALL_EXPR_ARG (arg, 0);
7870 tree tree_root;
7871 REAL_VALUE_TYPE dconstroot = dconst_third ();
7873 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7874 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7875 tree_root = build_real (type, dconstroot);
7876 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7880 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7881 if (BUILTIN_CBRT_P (fcode))
7883 tree arg0 = CALL_EXPR_ARG (arg, 0);
7884 if (tree_expr_nonnegative_p (arg0))
7886 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7888 if (powfn)
7890 tree tree_root;
7891 REAL_VALUE_TYPE dconstroot;
7893 real_arithmetic (&dconstroot, MULT_EXPR,
7894 dconst_third_ptr (), dconst_third_ptr ());
7895 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7896 tree_root = build_real (type, dconstroot);
7897 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7902 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7903 if (fcode == BUILT_IN_POW
7904 || fcode == BUILT_IN_POWF
7905 || fcode == BUILT_IN_POWL)
7907 tree arg00 = CALL_EXPR_ARG (arg, 0);
7908 tree arg01 = CALL_EXPR_ARG (arg, 1);
7909 if (tree_expr_nonnegative_p (arg00))
7911 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7912 const REAL_VALUE_TYPE dconstroot
7913 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7914 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7915 build_real (type, dconstroot));
7916 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7920 return NULL_TREE;
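/* Examples under -funsafe-math-optimizations: cbrt (exp (x)) becomes
   exp (x / 3.0), cbrt (sqrt (x)) becomes pow (x, 1.0/6.0), and for
   nonnegative x, cbrt (cbrt (x)) becomes pow (x, 1.0/9.0). */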
7923 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7924 TYPE is the type of the return value. Return NULL_TREE if no
7925 simplification can be made. */
7927 static tree
7928 fold_builtin_cos (location_t loc,
7929 tree arg, tree type, tree fndecl)
7931 tree res, narg;
7933 if (!validate_arg (arg, REAL_TYPE))
7934 return NULL_TREE;
7936 /* Calculate the result when the argument is a constant. */
7937 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7938 return res;
7940 /* Optimize cos(-x) into cos (x). */
7941 if ((narg = fold_strip_sign_ops (arg)))
7942 return build_call_expr_loc (loc, fndecl, 1, narg);
7944 return NULL_TREE;
7947 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7948 Return NULL_TREE if no simplification can be made. */
7950 static tree
7951 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7953 if (validate_arg (arg, REAL_TYPE))
7955 tree res, narg;
7957 /* Calculate the result when the argument is a constant. */
7958 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7959 return res;
7961 /* Optimize cosh(-x) into cosh (x). */
7962 if ((narg = fold_strip_sign_ops (arg)))
7963 return build_call_expr_loc (loc, fndecl, 1, narg);
7966 return NULL_TREE;
7969 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7970 argument ARG. TYPE is the type of the return value. Return
7971 NULL_TREE if no simplification can be made. */
7973 static tree
7974 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7975 bool hyper)
7977 if (validate_arg (arg, COMPLEX_TYPE)
7978 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7980 tree tmp;
7982 /* Calculate the result when the argument is a constant. */
7983 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7984 return tmp;
7986 /* Optimize fn(-x) into fn(x). */
7987 if ((tmp = fold_strip_sign_ops (arg)))
7988 return build_call_expr_loc (loc, fndecl, 1, tmp);
7991 return NULL_TREE;
7994 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7995 Return NULL_TREE if no simplification can be made. */
7997 static tree
7998 fold_builtin_tan (tree arg, tree type)
8000 enum built_in_function fcode;
8001 tree res;
8003 if (!validate_arg (arg, REAL_TYPE))
8004 return NULL_TREE;
8006 /* Calculate the result when the argument is a constant. */
8007 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8008 return res;
8010 /* Optimize tan(atan(x)) = x. */
8011 fcode = builtin_mathfn_code (arg);
8012 if (flag_unsafe_math_optimizations
8013 && (fcode == BUILT_IN_ATAN
8014 || fcode == BUILT_IN_ATANF
8015 || fcode == BUILT_IN_ATANL))
8016 return CALL_EXPR_ARG (arg, 0);
8018 return NULL_TREE;
8021 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8022 NULL_TREE if no simplification can be made. */
8024 static tree
8025 fold_builtin_sincos (location_t loc,
8026 tree arg0, tree arg1, tree arg2)
8028 tree type;
8029 tree res, fn, call;
8031 if (!validate_arg (arg0, REAL_TYPE)
8032 || !validate_arg (arg1, POINTER_TYPE)
8033 || !validate_arg (arg2, POINTER_TYPE))
8034 return NULL_TREE;
8036 type = TREE_TYPE (arg0);
8038 /* Calculate the result when the argument is a constant. */
8039 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8040 return res;
8042 /* Canonicalize sincos to cexpi. */
8043 if (!targetm.libc_has_function (function_c99_math_complex))
8044 return NULL_TREE;
8045 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8046 if (!fn)
8047 return NULL_TREE;
8049 call = build_call_expr_loc (loc, fn, 1, arg0);
8050 call = builtin_save_expr (call);
8052 return build2 (COMPOUND_EXPR, void_type_node,
8053 build2 (MODIFY_EXPR, void_type_node,
8054 build_fold_indirect_ref_loc (loc, arg1),
8055 build1 (IMAGPART_EXPR, type, call)),
8056 build2 (MODIFY_EXPR, void_type_node,
8057 build_fold_indirect_ref_loc (loc, arg2),
8058 build1 (REALPART_EXPR, type, call)));
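/* Conceptually, the canonicalization above turns
     sincos (x, &s, &c);
   into
     __complex__ double t = cexpi (x);
     s = __imag__ t, c = __real__ t;
   (for the double case), assuming the target libc provides the C99
   complex functions. */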
8061 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8062 NULL_TREE if no simplification can be made. */
8064 static tree
8065 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8067 tree rtype;
8068 tree realp, imagp, ifn;
8069 tree res;
8071 if (!validate_arg (arg0, COMPLEX_TYPE)
8072 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8073 return NULL_TREE;
8075 /* Calculate the result when the argument is a constant. */
8076 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8077 return res;
8079 rtype = TREE_TYPE (TREE_TYPE (arg0));
8081 /* If we can figure out the real part of arg0 and it is constant zero,
8082 fold to cexpi. */
8083 if (!targetm.libc_has_function (function_c99_math_complex))
8084 return NULL_TREE;
8085 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8086 if (!ifn)
8087 return NULL_TREE;
8089 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8090 && real_zerop (realp))
8092 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8093 return build_call_expr_loc (loc, ifn, 1, narg);
8096 /* If we can easily decompose the real and imaginary parts, split cexp
8097 into exp (r) * cexpi (i). */
8098 if (flag_unsafe_math_optimizations
8099 && realp)
8101 tree rfn, rcall, icall;
8103 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8104 if (!rfn)
8105 return NULL_TREE;
8107 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8108 if (!imagp)
8109 return NULL_TREE;
8111 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8112 icall = builtin_save_expr (icall);
8113 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8114 rcall = builtin_save_expr (rcall);
8115 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8116 fold_build2_loc (loc, MULT_EXPR, rtype,
8117 rcall,
8118 fold_build1_loc (loc, REALPART_EXPR,
8119 rtype, icall)),
8120 fold_build2_loc (loc, MULT_EXPR, rtype,
8121 rcall,
8122 fold_build1_loc (loc, IMAGPART_EXPR,
8123 rtype, icall)));
8126 return NULL_TREE;
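/* Examples: cexp (z) with a constant-zero real part folds to
   cexpi (__imag__ z); with -funsafe-math-optimizations it splits into
   exp (__real__ z) * cexpi (__imag__ z), evaluating each call only
   once. */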
8129 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8130 Return NULL_TREE if no simplification can be made. */
8132 static tree
8133 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8135 if (!validate_arg (arg, REAL_TYPE))
8136 return NULL_TREE;
8138 /* Optimize trunc of constant value. */
8139 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8141 REAL_VALUE_TYPE r, x;
8142 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8144 x = TREE_REAL_CST (arg);
8145 real_trunc (&r, TYPE_MODE (type), &x);
8146 return build_real (type, r);
8149 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8152 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8153 Return NULL_TREE if no simplification can be made. */
8155 static tree
8156 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8158 if (!validate_arg (arg, REAL_TYPE))
8159 return NULL_TREE;
8161 /* Optimize floor of constant value. */
8162 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8164 REAL_VALUE_TYPE x;
8166 x = TREE_REAL_CST (arg);
8167 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8169 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8170 REAL_VALUE_TYPE r;
8172 real_floor (&r, TYPE_MODE (type), &x);
8173 return build_real (type, r);
8177 /* Fold floor (x) where x is nonnegative to trunc (x). */
8178 if (tree_expr_nonnegative_p (arg))
8180 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8181 if (truncfn)
8182 return build_call_expr_loc (loc, truncfn, 1, arg);
8185 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8188 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8189 Return NULL_TREE if no simplification can be made. */
8191 static tree
8192 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8194 if (!validate_arg (arg, REAL_TYPE))
8195 return NULL_TREE;
8197 /* Optimize ceil of constant value. */
8198 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8200 REAL_VALUE_TYPE x;
8202 x = TREE_REAL_CST (arg);
8203 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8205 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8206 REAL_VALUE_TYPE r;
8208 real_ceil (&r, TYPE_MODE (type), &x);
8209 return build_real (type, r);
8213 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8216 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8217 Return NULL_TREE if no simplification can be made. */
8219 static tree
8220 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8222 if (!validate_arg (arg, REAL_TYPE))
8223 return NULL_TREE;
8225 /* Optimize round of constant value. */
8226 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8228 REAL_VALUE_TYPE x;
8230 x = TREE_REAL_CST (arg);
8231 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8233 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8234 REAL_VALUE_TYPE r;
8236 real_round (&r, TYPE_MODE (type), &x);
8237 return build_real (type, r);
8241 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8244 /* Fold function call to builtin lround, lroundf or lroundl (or the
8245 corresponding long long versions) and other rounding functions. ARG
8246 is the argument to the call. Return NULL_TREE if no simplification
8247 can be made. */
8249 static tree
8250 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8252 if (!validate_arg (arg, REAL_TYPE))
8253 return NULL_TREE;
8255 /* Optimize lround of constant value. */
8256 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8258 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8260 if (real_isfinite (&x))
8262 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8263 tree ftype = TREE_TYPE (arg);
8264 REAL_VALUE_TYPE r;
8265 bool fail = false;
8267 switch (DECL_FUNCTION_CODE (fndecl))
8269 CASE_FLT_FN (BUILT_IN_IFLOOR):
8270 CASE_FLT_FN (BUILT_IN_LFLOOR):
8271 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8272 real_floor (&r, TYPE_MODE (ftype), &x);
8273 break;
8275 CASE_FLT_FN (BUILT_IN_ICEIL):
8276 CASE_FLT_FN (BUILT_IN_LCEIL):
8277 CASE_FLT_FN (BUILT_IN_LLCEIL):
8278 real_ceil (&r, TYPE_MODE (ftype), &x);
8279 break;
8281 CASE_FLT_FN (BUILT_IN_IROUND):
8282 CASE_FLT_FN (BUILT_IN_LROUND):
8283 CASE_FLT_FN (BUILT_IN_LLROUND):
8284 real_round (&r, TYPE_MODE (ftype), &x);
8285 break;
8287 default:
8288 gcc_unreachable ();
8291 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8292 if (!fail)
8293 return wide_int_to_tree (itype, val);
8297 switch (DECL_FUNCTION_CODE (fndecl))
8299 CASE_FLT_FN (BUILT_IN_LFLOOR):
8300 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8301 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8302 if (tree_expr_nonnegative_p (arg))
8303 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8304 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8305 break;
8306 default:;
8309 return fold_fixed_mathfn (loc, fndecl, arg);
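/* Examples: lround (2.5) folds to 3 at compile time (real_round
   rounds halfway cases away from zero), and lfloor (x) for provably
   nonnegative x becomes a plain FIX_TRUNC_EXPR, i.e. (long) x. */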
8312 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8313 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8314 the argument to the call. Return NULL_TREE if no simplification can
8315 be made. */
8317 static tree
8318 fold_builtin_bitop (tree fndecl, tree arg)
8320 if (!validate_arg (arg, INTEGER_TYPE))
8321 return NULL_TREE;
8323 /* Optimize for constant argument. */
8324 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8326 tree type = TREE_TYPE (arg);
8327 int result;
8329 switch (DECL_FUNCTION_CODE (fndecl))
8331 CASE_INT_FN (BUILT_IN_FFS):
8332 result = wi::ffs (arg);
8333 break;
8335 CASE_INT_FN (BUILT_IN_CLZ):
8336 if (wi::ne_p (arg, 0))
8337 result = wi::clz (arg);
8338 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8339 result = TYPE_PRECISION (type);
8340 break;
8342 CASE_INT_FN (BUILT_IN_CTZ):
8343 if (wi::ne_p (arg, 0))
8344 result = wi::ctz (arg);
8345 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8346 result = TYPE_PRECISION (type);
8347 break;
8349 CASE_INT_FN (BUILT_IN_CLRSB):
8350 result = wi::clrsb (arg);
8351 break;
8353 CASE_INT_FN (BUILT_IN_POPCOUNT):
8354 result = wi::popcount (arg);
8355 break;
8357 CASE_INT_FN (BUILT_IN_PARITY):
8358 result = wi::parity (arg);
8359 break;
8361 default:
8362 gcc_unreachable ();
8365 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8368 return NULL_TREE;
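/* Examples: __builtin_popcount (0xff) folds to 8 and
   __builtin_ffs (8) folds to 4; clz (0) and ctz (0) fold to the
   target-defined value when CLZ_/CTZ_DEFINED_VALUE_AT_ZERO provides
   one, and to the type precision otherwise. */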
8371 /* Fold function call to builtin_bswap and the short, long and long long
8372 variants. Return NULL_TREE if no simplification can be made. */
8373 static tree
8374 fold_builtin_bswap (tree fndecl, tree arg)
8376 if (! validate_arg (arg, INTEGER_TYPE))
8377 return NULL_TREE;
8379 /* Optimize constant value. */
8380 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8382 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8384 switch (DECL_FUNCTION_CODE (fndecl))
8386 case BUILT_IN_BSWAP16:
8387 case BUILT_IN_BSWAP32:
8388 case BUILT_IN_BSWAP64:
8390 signop sgn = TYPE_SIGN (type);
8391 tree result =
8392 wide_int_to_tree (type,
8393 wide_int::from (arg, TYPE_PRECISION (type),
8394 sgn).bswap ());
8395 return result;
8397 default:
8398 gcc_unreachable ();
8402 return NULL_TREE;
8405 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8406 NULL_TREE if no simplification can be made. */
8408 static tree
8409 fold_builtin_hypot (location_t loc, tree fndecl,
8410 tree arg0, tree arg1, tree type)
8412 tree res, narg0, narg1;
8414 if (!validate_arg (arg0, REAL_TYPE)
8415 || !validate_arg (arg1, REAL_TYPE))
8416 return NULL_TREE;
8418 /* Calculate the result when the argument is a constant. */
8419 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8420 return res;
8422 /* If either argument to hypot has a negate or abs, strip that off.
8423 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8424 narg0 = fold_strip_sign_ops (arg0);
8425 narg1 = fold_strip_sign_ops (arg1);
8426 if (narg0 || narg1)
8428 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8429 narg1 ? narg1 : arg1);
8432 /* If either argument is zero, hypot is fabs of the other. */
8433 if (real_zerop (arg0))
8434 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8435 else if (real_zerop (arg1))
8436 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8438 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8439 if (flag_unsafe_math_optimizations
8440 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8442 const REAL_VALUE_TYPE sqrt2_trunc
8443 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8444 return fold_build2_loc (loc, MULT_EXPR, type,
8445 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8446 build_real (type, sqrt2_trunc));
8449 return NULL_TREE;
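/* Examples: hypot (-x, fabs (y)) folds to hypot (x, y),
   hypot (x, 0.0) folds to fabs (x), and with
   -funsafe-math-optimizations hypot (x, x) becomes
   fabs (x) * sqrt (2). */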
8453 /* Fold a builtin function call to pow, powf, or powl. Return
8454 NULL_TREE if no simplification can be made. */
8455 static tree
8456 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8458 tree res;
8460 if (!validate_arg (arg0, REAL_TYPE)
8461 || !validate_arg (arg1, REAL_TYPE))
8462 return NULL_TREE;
8464 /* Calculate the result when the argument is a constant. */
8465 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8466 return res;
8468 /* Optimize pow(1.0,y) = 1.0. */
8469 if (real_onep (arg0))
8470 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8472 if (TREE_CODE (arg1) == REAL_CST
8473 && !TREE_OVERFLOW (arg1))
8475 REAL_VALUE_TYPE cint;
8476 REAL_VALUE_TYPE c;
8477 HOST_WIDE_INT n;
8479 c = TREE_REAL_CST (arg1);
8481 /* Optimize pow(x,0.0) = 1.0. */
8482 if (REAL_VALUES_EQUAL (c, dconst0))
8483 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8484 arg0);
8486 /* Optimize pow(x,1.0) = x. */
8487 if (REAL_VALUES_EQUAL (c, dconst1))
8488 return arg0;
8490 /* Optimize pow(x,-1.0) = 1.0/x. */
8491 if (REAL_VALUES_EQUAL (c, dconstm1))
8492 return fold_build2_loc (loc, RDIV_EXPR, type,
8493 build_real (type, dconst1), arg0);
8495 /* Optimize pow(x,0.5) = sqrt(x). */
8496 if (flag_unsafe_math_optimizations
8497 && REAL_VALUES_EQUAL (c, dconsthalf))
8499 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8501 if (sqrtfn != NULL_TREE)
8502 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8505 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8506 if (flag_unsafe_math_optimizations)
8508 const REAL_VALUE_TYPE dconstroot
8509 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8511 if (REAL_VALUES_EQUAL (c, dconstroot))
8513 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8514 if (cbrtfn != NULL_TREE)
8515 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8519 /* Check for an integer exponent. */
8520 n = real_to_integer (&c);
8521 real_from_integer (&cint, VOIDmode, n, SIGNED);
8522 if (real_identical (&c, &cint))
8524 /* Attempt to evaluate pow at compile-time, unless this should
8525 raise an exception. */
8526 if (TREE_CODE (arg0) == REAL_CST
8527 && !TREE_OVERFLOW (arg0)
8528 && (n > 0
8529 || (!flag_trapping_math && !flag_errno_math)
8530 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8532 REAL_VALUE_TYPE x;
8533 bool inexact;
8535 x = TREE_REAL_CST (arg0);
8536 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8537 if (flag_unsafe_math_optimizations || !inexact)
8538 return build_real (type, x);
8541 /* Strip sign ops from even integer powers. */
8542 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8544 tree narg0 = fold_strip_sign_ops (arg0);
8545 if (narg0)
8546 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8551 if (flag_unsafe_math_optimizations)
8553 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8555 /* Optimize pow(expN(x),y) = expN(x*y). */
8556 if (BUILTIN_EXPONENT_P (fcode))
8558 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8559 tree arg = CALL_EXPR_ARG (arg0, 0);
8560 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8561 return build_call_expr_loc (loc, expfn, 1, arg);
8564 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8565 if (BUILTIN_SQRT_P (fcode))
8567 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8568 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8569 build_real (type, dconsthalf));
8570 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8573 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8574 if (BUILTIN_CBRT_P (fcode))
8576 tree arg = CALL_EXPR_ARG (arg0, 0);
8577 if (tree_expr_nonnegative_p (arg))
8579 const REAL_VALUE_TYPE dconstroot
8580 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8581 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8582 build_real (type, dconstroot));
8583 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8587 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8588 if (fcode == BUILT_IN_POW
8589 || fcode == BUILT_IN_POWF
8590 || fcode == BUILT_IN_POWL)
8592 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8593 if (tree_expr_nonnegative_p (arg00))
8595 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8596 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8597 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8602 return NULL_TREE;
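/* Examples: pow (x, 1.0) folds to x and pow (x, -1.0) to 1.0 / x
   unconditionally; pow (2.0, 10.0) is evaluated to 1024.0 at compile
   time; and under -funsafe-math-optimizations pow (x, 0.5) becomes
   sqrt (x) and pow (pow (x, y), z) becomes pow (x, y * z) for
   nonnegative x. */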
8605 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8606 Return NULL_TREE if no simplification can be made. */
8607 static tree
8608 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8609 tree arg0, tree arg1, tree type)
8611 if (!validate_arg (arg0, REAL_TYPE)
8612 || !validate_arg (arg1, INTEGER_TYPE))
8613 return NULL_TREE;
8615 /* Optimize pow(1.0,y) = 1.0. */
8616 if (real_onep (arg0))
8617 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8619 if (tree_fits_shwi_p (arg1))
8621 HOST_WIDE_INT c = tree_to_shwi (arg1);
8623 /* Evaluate powi at compile-time. */
8624 if (TREE_CODE (arg0) == REAL_CST
8625 && !TREE_OVERFLOW (arg0))
8627 REAL_VALUE_TYPE x;
8628 x = TREE_REAL_CST (arg0);
8629 real_powi (&x, TYPE_MODE (type), &x, c);
8630 return build_real (type, x);
8633 /* Optimize pow(x,0) = 1.0. */
8634 if (c == 0)
8635 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8636 arg0);
8638 /* Optimize pow(x,1) = x. */
8639 if (c == 1)
8640 return arg0;
8642 /* Optimize pow(x,-1) = 1.0/x. */
8643 if (c == -1)
8644 return fold_build2_loc (loc, RDIV_EXPR, type,
8645 build_real (type, dconst1), arg0);
8648 return NULL_TREE;
8651 /* A subroutine of fold_builtin to fold the various exponent
8652 functions. Return NULL_TREE if no simplification can be made.
8653 FUNC is the corresponding MPFR exponent function. */
8655 static tree
8656 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8657 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8659 if (validate_arg (arg, REAL_TYPE))
8661 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8662 tree res;
8664 /* Calculate the result when the argument is a constant. */
8665 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8666 return res;
8668 /* Optimize expN(logN(x)) = x. */
8669 if (flag_unsafe_math_optimizations)
8671 const enum built_in_function fcode = builtin_mathfn_code (arg);
8673 if ((func == mpfr_exp
8674 && (fcode == BUILT_IN_LOG
8675 || fcode == BUILT_IN_LOGF
8676 || fcode == BUILT_IN_LOGL))
8677 || (func == mpfr_exp2
8678 && (fcode == BUILT_IN_LOG2
8679 || fcode == BUILT_IN_LOG2F
8680 || fcode == BUILT_IN_LOG2L))
8681 || (func == mpfr_exp10
8682 && (fcode == BUILT_IN_LOG10
8683 || fcode == BUILT_IN_LOG10F
8684 || fcode == BUILT_IN_LOG10L)))
8685 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8689 return NULL_TREE;
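/* Example: with -funsafe-math-optimizations, exp (log (x)) folds to
   x, and likewise exp2 (log2 (x)) and exp10 (log10 (x)). */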
8692 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8693 arguments to the call, and TYPE is its return type.
8694 Return NULL_TREE if no simplification can be made. */
8696 static tree
8697 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8699 if (!validate_arg (arg1, POINTER_TYPE)
8700 || !validate_arg (arg2, INTEGER_TYPE)
8701 || !validate_arg (len, INTEGER_TYPE))
8702 return NULL_TREE;
8703 else
8705 const char *p1;
8707 if (TREE_CODE (arg2) != INTEGER_CST
8708 || !tree_fits_uhwi_p (len))
8709 return NULL_TREE;
8711 p1 = c_getstr (arg1);
8712 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8714 char c;
8715 const char *r;
8716 tree tem;
8718 if (target_char_cast (arg2, &c))
8719 return NULL_TREE;
8721 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8723 if (r == NULL)
8724 return build_int_cst (TREE_TYPE (arg1), 0);
8726 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8727 return fold_convert_loc (loc, type, tem);
8729 return NULL_TREE;
8733 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8734 Return NULL_TREE if no simplification can be made. */
8736 static tree
8737 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8739 const char *p1, *p2;
8741 if (!validate_arg (arg1, POINTER_TYPE)
8742 || !validate_arg (arg2, POINTER_TYPE)
8743 || !validate_arg (len, INTEGER_TYPE))
8744 return NULL_TREE;
8746 /* If the LEN parameter is zero, return zero. */
8747 if (integer_zerop (len))
8748 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8749 arg1, arg2);
8751 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8752 if (operand_equal_p (arg1, arg2, 0))
8753 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8755 p1 = c_getstr (arg1);
8756 p2 = c_getstr (arg2);
8758 /* If all arguments are constant, and the value of len is not greater
8759 than the lengths of arg1 and arg2, evaluate at compile-time. */
8760 if (tree_fits_uhwi_p (len) && p1 && p2
8761 && compare_tree_int (len, strlen (p1) + 1) <= 0
8762 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8764 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8766 if (r > 0)
8767 return integer_one_node;
8768 else if (r < 0)
8769 return integer_minus_one_node;
8770 else
8771 return integer_zero_node;
8774 /* If the len parameter is one, return an expression corresponding to
8775 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8776 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8778 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8779 tree cst_uchar_ptr_node
8780 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8782 tree ind1
8783 = fold_convert_loc (loc, integer_type_node,
8784 build1 (INDIRECT_REF, cst_uchar_node,
8785 fold_convert_loc (loc,
8786 cst_uchar_ptr_node,
8787 arg1)));
8788 tree ind2
8789 = fold_convert_loc (loc, integer_type_node,
8790 build1 (INDIRECT_REF, cst_uchar_node,
8791 fold_convert_loc (loc,
8792 cst_uchar_ptr_node,
8793 arg2)));
8794 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8797 return NULL_TREE;
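/* Examples: memcmp (p, p, n) folds to 0 while still evaluating n for
   its side effects, memcmp ("ab", "ac", 2) folds to -1 at compile
   time, and memcmp (p, q, 1) becomes the byte difference
   *(const unsigned char *) p - *(const unsigned char *) q. */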
8800 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8801 Return NULL_TREE if no simplification can be made. */
8803 static tree
8804 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8806 const char *p1, *p2;
8808 if (!validate_arg (arg1, POINTER_TYPE)
8809 || !validate_arg (arg2, POINTER_TYPE))
8810 return NULL_TREE;
8812 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8813 if (operand_equal_p (arg1, arg2, 0))
8814 return integer_zero_node;
8816 p1 = c_getstr (arg1);
8817 p2 = c_getstr (arg2);
8819 if (p1 && p2)
8821 const int i = strcmp (p1, p2);
8822 if (i < 0)
8823 return integer_minus_one_node;
8824 else if (i > 0)
8825 return integer_one_node;
8826 else
8827 return integer_zero_node;
8830 /* If the second arg is "", return *(const unsigned char*)arg1. */
8831 if (p2 && *p2 == '\0')
8833 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8834 tree cst_uchar_ptr_node
8835 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8837 return fold_convert_loc (loc, integer_type_node,
8838 build1 (INDIRECT_REF, cst_uchar_node,
8839 fold_convert_loc (loc,
8840 cst_uchar_ptr_node,
8841 arg1)));
8844 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8845 if (p1 && *p1 == '\0')
8847 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8848 tree cst_uchar_ptr_node
8849 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8851 tree temp
8852 = fold_convert_loc (loc, integer_type_node,
8853 build1 (INDIRECT_REF, cst_uchar_node,
8854 fold_convert_loc (loc,
8855 cst_uchar_ptr_node,
8856 arg2)));
8857 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8860 return NULL_TREE;
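/* Examples: strcmp ("a", "b") folds to -1 at compile time, and
   strcmp (s, "") becomes *(const unsigned char *) s. */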
8863 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8864 Return NULL_TREE if no simplification can be made. */
8866 static tree
8867 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8869 const char *p1, *p2;
8871 if (!validate_arg (arg1, POINTER_TYPE)
8872 || !validate_arg (arg2, POINTER_TYPE)
8873 || !validate_arg (len, INTEGER_TYPE))
8874 return NULL_TREE;
8876 /* If the LEN parameter is zero, return zero. */
8877 if (integer_zerop (len))
8878 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8879 arg1, arg2);
8881 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8882 if (operand_equal_p (arg1, arg2, 0))
8883 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8885 p1 = c_getstr (arg1);
8886 p2 = c_getstr (arg2);
8888 if (tree_fits_uhwi_p (len) && p1 && p2)
8890 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8891 if (i > 0)
8892 return integer_one_node;
8893 else if (i < 0)
8894 return integer_minus_one_node;
8895 else
8896 return integer_zero_node;
8899 /* If the second arg is "", and the length is greater than zero,
8900 return *(const unsigned char*)arg1. */
8901 if (p2 && *p2 == '\0'
8902 && TREE_CODE (len) == INTEGER_CST
8903 && tree_int_cst_sgn (len) == 1)
8905 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8906 tree cst_uchar_ptr_node
8907 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8909 return fold_convert_loc (loc, integer_type_node,
8910 build1 (INDIRECT_REF, cst_uchar_node,
8911 fold_convert_loc (loc,
8912 cst_uchar_ptr_node,
8913 arg1)));
8916 /* If the first arg is "", and the length is greater than zero,
8917 return -*(const unsigned char*)arg2. */
8918 if (p1 && *p1 == '\0'
8919 && TREE_CODE (len) == INTEGER_CST
8920 && tree_int_cst_sgn (len) == 1)
8922 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8923 tree cst_uchar_ptr_node
8924 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8926 tree temp = fold_convert_loc (loc, integer_type_node,
8927 build1 (INDIRECT_REF, cst_uchar_node,
8928 fold_convert_loc (loc,
8929 cst_uchar_ptr_node,
8930 arg2)));
8931 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8934 /* If the len parameter is one, return an expression corresponding to
8935 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8936 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8938 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8939 tree cst_uchar_ptr_node
8940 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8942 tree ind1 = fold_convert_loc (loc, integer_type_node,
8943 build1 (INDIRECT_REF, cst_uchar_node,
8944 fold_convert_loc (loc,
8945 cst_uchar_ptr_node,
8946 arg1)));
8947 tree ind2 = fold_convert_loc (loc, integer_type_node,
8948 build1 (INDIRECT_REF, cst_uchar_node,
8949 fold_convert_loc (loc,
8950 cst_uchar_ptr_node,
8951 arg2)));
8952 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8955 return NULL_TREE;
8958 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8959 ARG. Return NULL_TREE if no simplification can be made. */
8961 static tree
8962 fold_builtin_signbit (location_t loc, tree arg, tree type)
8964 if (!validate_arg (arg, REAL_TYPE))
8965 return NULL_TREE;
8967 /* If ARG is a compile-time constant, determine the result. */
8968 if (TREE_CODE (arg) == REAL_CST
8969 && !TREE_OVERFLOW (arg))
8971 REAL_VALUE_TYPE c;
8973 c = TREE_REAL_CST (arg);
8974 return (REAL_VALUE_NEGATIVE (c)
8975 ? build_one_cst (type)
8976 : build_zero_cst (type));
8979 /* If ARG is non-negative, the result is always zero. */
8980 if (tree_expr_nonnegative_p (arg))
8981 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8983 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8984 if (!HONOR_SIGNED_ZEROS (arg))
8985 return fold_convert (type,
8986 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8987 build_real (TREE_TYPE (arg), dconst0)));
8989 return NULL_TREE;
8992 /* Fold function call to builtin copysign, copysignf or copysignl with
8993 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8994 be made. */
8996 static tree
8997 fold_builtin_copysign (location_t loc, tree fndecl,
8998 tree arg1, tree arg2, tree type)
9000 tree tem;
9002 if (!validate_arg (arg1, REAL_TYPE)
9003 || !validate_arg (arg2, REAL_TYPE))
9004 return NULL_TREE;
9006 /* copysign(X,X) is X. */
9007 if (operand_equal_p (arg1, arg2, 0))
9008 return fold_convert_loc (loc, type, arg1);
9010 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9011 if (TREE_CODE (arg1) == REAL_CST
9012 && TREE_CODE (arg2) == REAL_CST
9013 && !TREE_OVERFLOW (arg1)
9014 && !TREE_OVERFLOW (arg2))
9016 REAL_VALUE_TYPE c1, c2;
9018 c1 = TREE_REAL_CST (arg1);
9019 c2 = TREE_REAL_CST (arg2);
9020 /* c1.sign := c2.sign. */
9021 real_copysign (&c1, &c2);
9022 return build_real (type, c1);
9025 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9026 Remember to evaluate Y for side-effects. */
9027 if (tree_expr_nonnegative_p (arg2))
9028 return omit_one_operand_loc (loc, type,
9029 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9030 arg2);
9032 /* Strip sign changing operations for the first argument. */
9033 tem = fold_strip_sign_ops (arg1);
9034 if (tem)
9035 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9037 return NULL_TREE;
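/* Examples: copysign (x, x) folds to x, copysign (3.0, -1.0) folds to
   -3.0 at compile time, and copysign (x, fabs (y)) becomes fabs (x),
   still evaluating y for its side effects. */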
9040 /* Fold a call to builtin isascii with argument ARG. */
9042 static tree
9043 fold_builtin_isascii (location_t loc, tree arg)
9045 if (!validate_arg (arg, INTEGER_TYPE))
9046 return NULL_TREE;
9047 else
9049 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9050 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9051 build_int_cst (integer_type_node,
9052 ~ (unsigned HOST_WIDE_INT) 0x7f));
9053 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9054 arg, integer_zero_node);
9058 /* Fold a call to builtin toascii with argument ARG. */
9060 static tree
9061 fold_builtin_toascii (location_t loc, tree arg)
9063 if (!validate_arg (arg, INTEGER_TYPE))
9064 return NULL_TREE;
9066 /* Transform toascii(c) -> (c & 0x7f). */
9067 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9068 build_int_cst (integer_type_node, 0x7f));
9071 /* Fold a call to builtin isdigit with argument ARG. */
9073 static tree
9074 fold_builtin_isdigit (location_t loc, tree arg)
9076 if (!validate_arg (arg, INTEGER_TYPE))
9077 return NULL_TREE;
9078 else
9080 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9081 /* According to the C standard, isdigit is unaffected by locale.
9082 However, it definitely is affected by the target character set. */
9083 unsigned HOST_WIDE_INT target_digit0
9084 = lang_hooks.to_target_charset ('0');
9086 if (target_digit0 == 0)
9087 return NULL_TREE;
9089 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9090 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9091 build_int_cst (unsigned_type_node, target_digit0));
9092 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9093 build_int_cst (unsigned_type_node, 9));
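/* Illustrative example, assuming an ASCII target: to_target_charset ('0')
   is 48, so isdigit (c) folds to the single unsigned comparison
   (unsigned) c - 48 <= 9, avoiding the library call entirely.  */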
9097 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9099 static tree
9100 fold_builtin_fabs (location_t loc, tree arg, tree type)
9102 if (!validate_arg (arg, REAL_TYPE))
9103 return NULL_TREE;
9105 arg = fold_convert_loc (loc, type, arg);
9106 if (TREE_CODE (arg) == REAL_CST)
9107 return fold_abs_const (arg, type);
9108 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9111 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9113 static tree
9114 fold_builtin_abs (location_t loc, tree arg, tree type)
9116 if (!validate_arg (arg, INTEGER_TYPE))
9117 return NULL_TREE;
9119 arg = fold_convert_loc (loc, type, arg);
9120 if (TREE_CODE (arg) == INTEGER_CST)
9121 return fold_abs_const (arg, type);
9122 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9125 /* Fold a fma operation with arguments ARG[012]. */
9127 tree
9128 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9129 tree type, tree arg0, tree arg1, tree arg2)
9131 if (TREE_CODE (arg0) == REAL_CST
9132 && TREE_CODE (arg1) == REAL_CST
9133 && TREE_CODE (arg2) == REAL_CST)
9134 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9136 return NULL_TREE;
9139 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9141 static tree
9142 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9144 if (validate_arg (arg0, REAL_TYPE)
9145 && validate_arg (arg1, REAL_TYPE)
9146 && validate_arg (arg2, REAL_TYPE))
9148 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9149 if (tem)
9150 return tem;
9152 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9153 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9154 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9156 return NULL_TREE;
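/* Illustrative examples: fma (2.0, 3.0, 0.5) folds to 6.5, computed by
   MPFR with a single rounding; for non-constant operands the call becomes
   FMA_EXPR only when the target advertises an fma optab, so targets
   without a fused multiply-add instruction keep the library call.  */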
9159 /* Fold a call to builtin fmin or fmax. */
9161 static tree
9162 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9163 tree type, bool max)
9165 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9167 /* Calculate the result when the argument is a constant. */
9168 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9170 if (res)
9171 return res;
9173 /* If either argument is NaN, return the other one. Avoid the
9174 transformation if we get (and honor) a signalling NaN. Using
9175 omit_one_operand() ensures we create a non-lvalue. */
9176 if (TREE_CODE (arg0) == REAL_CST
9177 && real_isnan (&TREE_REAL_CST (arg0))
9178 && (! HONOR_SNANS (arg0)
9179 || ! TREE_REAL_CST (arg0).signalling))
9180 return omit_one_operand_loc (loc, type, arg1, arg0);
9181 if (TREE_CODE (arg1) == REAL_CST
9182 && real_isnan (&TREE_REAL_CST (arg1))
9183 && (! HONOR_SNANS (arg1)
9184 || ! TREE_REAL_CST (arg1).signalling))
9185 return omit_one_operand_loc (loc, type, arg0, arg1);
9187 /* Transform fmin/fmax(x,x) -> x. */
9188 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9189 return omit_one_operand_loc (loc, type, arg0, arg1);
9191 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9192 functions to return the numeric arg if the other one is NaN.
9193 These tree codes don't honor that, so only transform if
9194 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9195 handled, so we don't have to worry about it either. */
9196 if (flag_finite_math_only)
9197 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9198 fold_convert_loc (loc, type, arg0),
9199 fold_convert_loc (loc, type, arg1));
9201 return NULL_TREE;
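/* Illustrative examples: fmax (x, __builtin_nan ("")) folds to x, since a
   quiet NaN operand selects the other argument; fmin (x, x) folds to x;
   and with -ffinite-math-only, fmax (a, b) becomes MAX_EXPR <a, b>.  */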
9204 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9206 static tree
9207 fold_builtin_carg (location_t loc, tree arg, tree type)
9209 if (validate_arg (arg, COMPLEX_TYPE)
9210 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9212 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9214 if (atan2_fn)
9216 tree new_arg = builtin_save_expr (arg);
9217 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9218 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9219 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9223 return NULL_TREE;
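/* Illustrative example: carg (z) becomes
   atan2 (__imag__ z, __real__ z), with the argument wrapped in a
   SAVE_EXPR so that z is evaluated only once.  */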
9226 /* Fold a call to builtin logb/ilogb. */
9228 static tree
9229 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9231 if (! validate_arg (arg, REAL_TYPE))
9232 return NULL_TREE;
9234 STRIP_NOPS (arg);
9236 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9238 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9240 switch (value->cl)
9242 case rvc_nan:
9243 case rvc_inf:
9244 /* If arg is Inf or NaN and we're logb, return it. */
9245 if (TREE_CODE (rettype) == REAL_TYPE)
9247 /* For logb(-Inf) we have to return +Inf. */
9248 if (real_isinf (value) && real_isneg (value))
9250 REAL_VALUE_TYPE tem;
9251 real_inf (&tem);
9252 return build_real (rettype, tem);
9254 return fold_convert_loc (loc, rettype, arg);
9256 /* Fall through... */
9257 case rvc_zero:
9258 /* Zero may set errno and/or raise an exception for logb; also,
9259 for ilogb we don't know FP_ILOGB0. */
9260 return NULL_TREE;
9261 case rvc_normal:
9262 /* For normal numbers, proceed iff radix == 2. In GCC,
9263 normalized significands are in the range [0.5, 1.0). We
9264 want the exponent as if they were [1.0, 2.0) so get the
9265 exponent and subtract 1. */
9266 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9267 return fold_convert_loc (loc, rettype,
9268 build_int_cst (integer_type_node,
9269 REAL_EXP (value)-1));
9270 break;
9274 return NULL_TREE;
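/* Illustrative examples for a radix-2 format: 8.0 is represented as
   0.5 * 2**4, so logb (8.0) folds to 3.0 and ilogb (8.0) to 3;
   logb (-Inf) folds to +Inf; zero arguments are deliberately left alone
   because of errno and the unknown FP_ILOGB0 value.  */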
9277 /* Fold a call to builtin significand, if radix == 2. */
9279 static tree
9280 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9282 if (! validate_arg (arg, REAL_TYPE))
9283 return NULL_TREE;
9285 STRIP_NOPS (arg);
9287 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9289 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9291 switch (value->cl)
9293 case rvc_zero:
9294 case rvc_nan:
9295 case rvc_inf:
9296 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9297 return fold_convert_loc (loc, rettype, arg);
9298 case rvc_normal:
9299 /* For normal numbers, proceed iff radix == 2. */
9300 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9302 REAL_VALUE_TYPE result = *value;
9303 /* In GCC, normalized significands are in the range [0.5,
9304 1.0). We want them to be [1.0, 2.0) so set the
9305 exponent to 1. */
9306 SET_REAL_EXP (&result, 1);
9307 return build_real (rettype, result);
9309 break;
9313 return NULL_TREE;
9316 /* Fold a call to builtin frexp, we can assume the base is 2. */
9318 static tree
9319 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9321 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9322 return NULL_TREE;
9324 STRIP_NOPS (arg0);
9326 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9327 return NULL_TREE;
9329 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9331 /* Proceed if a valid pointer type was passed in. */
9332 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9334 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9335 tree frac, exp;
9337 switch (value->cl)
9339 case rvc_zero:
9340 /* For +-0, return (*exp = 0, +-0). */
9341 exp = integer_zero_node;
9342 frac = arg0;
9343 break;
9344 case rvc_nan:
9345 case rvc_inf:
9346 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9347 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9348 case rvc_normal:
9350 /* Since the frexp function always expects base 2, and in
9351 GCC normalized significands are already in the range
9352 [0.5, 1.0), we have exactly what frexp wants. */
9353 REAL_VALUE_TYPE frac_rvt = *value;
9354 SET_REAL_EXP (&frac_rvt, 0);
9355 frac = build_real (rettype, frac_rvt);
9356 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9358 break;
9359 default:
9360 gcc_unreachable ();
9363 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9364 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9365 TREE_SIDE_EFFECTS (arg1) = 1;
9366 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9369 return NULL_TREE;
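/* Illustrative example: frexp (8.0, &e) folds to the COMPOUND_EXPR
   (*e = 4, 0.5), storing the exponent and yielding the fraction, since
   GCC's internal significand already lies in [0.5, 1.0).  */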
9372 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9373 then we can assume the base is two. If it's false, then we have to
9374 check the mode of the TYPE parameter in certain cases. */
9376 static tree
9377 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9378 tree type, bool ldexp)
9380 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9382 STRIP_NOPS (arg0);
9383 STRIP_NOPS (arg1);
9385 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9386 if (real_zerop (arg0) || integer_zerop (arg1)
9387 || (TREE_CODE (arg0) == REAL_CST
9388 && !real_isfinite (&TREE_REAL_CST (arg0))))
9389 return omit_one_operand_loc (loc, type, arg0, arg1);
9391 /* If both arguments are constant, then try to evaluate it. */
9392 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9393 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9394 && tree_fits_shwi_p (arg1))
9396 /* Bound the maximum adjustment to twice the range of the
9397 mode's valid exponents. Use abs to ensure the range is
9398 positive as a sanity check. */
9399 const long max_exp_adj = 2 *
9400 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9401 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9403 /* Get the user-requested adjustment. */
9404 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9406 /* The requested adjustment must be inside this range. This
9407 is a preliminary cap to avoid things like overflow; we
9408 may still fail to compute the result for other reasons. */
9409 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9411 REAL_VALUE_TYPE initial_result;
9413 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9415 /* Ensure we didn't overflow. */
9416 if (! real_isinf (&initial_result))
9418 const REAL_VALUE_TYPE trunc_result
9419 = real_value_truncate (TYPE_MODE (type), initial_result);
9421 /* Only proceed if the target mode can hold the
9422 resulting value. */
9423 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9424 return build_real (type, trunc_result);
9430 return NULL_TREE;
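/* Illustrative examples: ldexp (x, 0) folds to x while still evaluating
   both operands; ldexp (0.75, 4) folds to 12.0; and a requested exponent
   far outside the mode's range, or one whose result would overflow or be
   inexact in the target mode, is left for the library to handle.  */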
9433 /* Fold a call to builtin modf. */
9435 static tree
9436 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9438 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9439 return NULL_TREE;
9441 STRIP_NOPS (arg0);
9443 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9444 return NULL_TREE;
9446 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9448 /* Proceed if a valid pointer type was passed in. */
9449 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9451 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9452 REAL_VALUE_TYPE trunc, frac;
9454 switch (value->cl)
9456 case rvc_nan:
9457 case rvc_zero:
9458 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9459 trunc = frac = *value;
9460 break;
9461 case rvc_inf:
9462 /* For +-Inf, return (*arg1 = arg0, +-0). */
9463 frac = dconst0;
9464 frac.sign = value->sign;
9465 trunc = *value;
9466 break;
9467 case rvc_normal:
9468 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9469 real_trunc (&trunc, VOIDmode, value);
9470 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9471 /* If the original number was negative and already
9472 integral, then the fractional part is -0.0. */
9473 if (value->sign && frac.cl == rvc_zero)
9474 frac.sign = value->sign;
9475 break;
9478 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9479 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9480 build_real (rettype, trunc));
9481 TREE_SIDE_EFFECTS (arg1) = 1;
9482 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9483 build_real (rettype, frac));
9486 return NULL_TREE;
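/* Illustrative examples: modf (3.25, &i) folds to (*i = 3.0, 0.25), and
   modf (-2.0, &i) folds to (*i = -2.0, -0.0), preserving the sign on the
   zero fractional part.  */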
9489 /* Given a location LOC, an interclass builtin function decl FNDECL
9490 and its single argument ARG, return a folded expression computing
9491 the same, or NULL_TREE if we either couldn't or didn't want to fold
9492 (the latter happens if there's an RTL instruction available). */
9494 static tree
9495 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9497 machine_mode mode;
9499 if (!validate_arg (arg, REAL_TYPE))
9500 return NULL_TREE;
9502 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9503 return NULL_TREE;
9505 mode = TYPE_MODE (TREE_TYPE (arg));
9507 /* If there is no optab, try generic code. */
9508 switch (DECL_FUNCTION_CODE (fndecl))
9510 tree result;
9512 CASE_FLT_FN (BUILT_IN_ISINF):
9514 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9515 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9516 tree const type = TREE_TYPE (arg);
9517 REAL_VALUE_TYPE r;
9518 char buf[128];
9520 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9521 real_from_string (&r, buf);
9522 result = build_call_expr (isgr_fn, 2,
9523 fold_build1_loc (loc, ABS_EXPR, type, arg),
9524 build_real (type, r));
9525 return result;
9527 CASE_FLT_FN (BUILT_IN_FINITE):
9528 case BUILT_IN_ISFINITE:
9530 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9531 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9532 tree const type = TREE_TYPE (arg);
9533 REAL_VALUE_TYPE r;
9534 char buf[128];
9536 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9537 real_from_string (&r, buf);
9538 result = build_call_expr (isle_fn, 2,
9539 fold_build1_loc (loc, ABS_EXPR, type, arg),
9540 build_real (type, r));
9541 /*result = fold_build2_loc (loc, UNGT_EXPR,
9542 TREE_TYPE (TREE_TYPE (fndecl)),
9543 fold_build1_loc (loc, ABS_EXPR, type, arg),
9544 build_real (type, r));
9545 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9546 TREE_TYPE (TREE_TYPE (fndecl)),
9547 result);*/
9548 return result;
9550 case BUILT_IN_ISNORMAL:
9552 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9553 islessequal(fabs(x),DBL_MAX). */
9554 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9555 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9556 tree const type = TREE_TYPE (arg);
9557 REAL_VALUE_TYPE rmax, rmin;
9558 char buf[128];
9560 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9561 real_from_string (&rmax, buf);
9562 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9563 real_from_string (&rmin, buf);
9564 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9565 result = build_call_expr (isle_fn, 2, arg,
9566 build_real (type, rmax));
9567 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9568 build_call_expr (isge_fn, 2, arg,
9569 build_real (type, rmin)));
9570 return result;
9572 default:
9573 break;
9576 return NULL_TREE;
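/* Illustrative examples, assuming IEEE double and no direct instruction
   for the classification: isinf (x) becomes
   isgreater (fabs (x), DBL_MAX), isfinite (x) becomes
   islessequal (fabs (x), DBL_MAX), and isnormal (x) additionally compares
   fabs (x) against 0x1p-1022, i.e. DBL_MIN.  */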
9579 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9580 ARG is the argument for the call. */
9582 static tree
9583 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9585 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9586 REAL_VALUE_TYPE r;
9588 if (!validate_arg (arg, REAL_TYPE))
9589 return NULL_TREE;
9591 switch (builtin_index)
9593 case BUILT_IN_ISINF:
9594 if (!HONOR_INFINITIES (arg))
9595 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9597 if (TREE_CODE (arg) == REAL_CST)
9599 r = TREE_REAL_CST (arg);
9600 if (real_isinf (&r))
9601 return real_compare (GT_EXPR, &r, &dconst0)
9602 ? integer_one_node : integer_minus_one_node;
9603 else
9604 return integer_zero_node;
9607 return NULL_TREE;
9609 case BUILT_IN_ISINF_SIGN:
9611 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9612 /* In a boolean context, GCC will fold the inner COND_EXPR to
9613 1. So e.g. "if (isinf_sign(x))" would be folded to just
9614 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9615 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9616 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9617 tree tmp = NULL_TREE;
9619 arg = builtin_save_expr (arg);
9621 if (signbit_fn && isinf_fn)
9623 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9624 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9626 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9627 signbit_call, integer_zero_node);
9628 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9629 isinf_call, integer_zero_node);
9631 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9632 integer_minus_one_node, integer_one_node);
9633 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9634 isinf_call, tmp,
9635 integer_zero_node);
9638 return tmp;
9641 case BUILT_IN_ISFINITE:
9642 if (!HONOR_NANS (arg)
9643 && !HONOR_INFINITIES (arg))
9644 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9646 if (TREE_CODE (arg) == REAL_CST)
9648 r = TREE_REAL_CST (arg);
9649 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9652 return NULL_TREE;
9654 case BUILT_IN_ISNAN:
9655 if (!HONOR_NANS (arg))
9656 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9658 if (TREE_CODE (arg) == REAL_CST)
9660 r = TREE_REAL_CST (arg);
9661 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9664 arg = builtin_save_expr (arg);
9665 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9667 default:
9668 gcc_unreachable ();
9672 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9673 This builtin will generate code to return the appropriate floating
9674 point classification depending on the value of the floating point
9675 number passed in. The possible return values must be supplied as
9676 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9677 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9678 one floating point argument which is "type generic". */
9680 static tree
9681 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9683 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9684 arg, type, res, tmp;
9685 machine_mode mode;
9686 REAL_VALUE_TYPE r;
9687 char buf[128];
9689 /* Verify the required arguments in the original call. */
9690 if (nargs != 6
9691 || !validate_arg (args[0], INTEGER_TYPE)
9692 || !validate_arg (args[1], INTEGER_TYPE)
9693 || !validate_arg (args[2], INTEGER_TYPE)
9694 || !validate_arg (args[3], INTEGER_TYPE)
9695 || !validate_arg (args[4], INTEGER_TYPE)
9696 || !validate_arg (args[5], REAL_TYPE))
9697 return NULL_TREE;
9699 fp_nan = args[0];
9700 fp_infinite = args[1];
9701 fp_normal = args[2];
9702 fp_subnormal = args[3];
9703 fp_zero = args[4];
9704 arg = args[5];
9705 type = TREE_TYPE (arg);
9706 mode = TYPE_MODE (type);
9707 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9709 /* fpclassify(x) ->
9710 isnan(x) ? FP_NAN :
9711 (fabs(x) == Inf ? FP_INFINITE :
9712 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9713 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9715 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9716 build_real (type, dconst0));
9717 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9718 tmp, fp_zero, fp_subnormal);
9720 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9721 real_from_string (&r, buf);
9722 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9723 arg, build_real (type, r));
9724 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9726 if (HONOR_INFINITIES (mode))
9728 real_inf (&r);
9729 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9730 build_real (type, r));
9731 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9732 fp_infinite, res);
9735 if (HONOR_NANS (mode))
9737 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9738 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9741 return res;
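/* Note on the code above: the COND_EXPR chain is built inside out -- the
   zero-vs-subnormal test first, then the normal and infinite tests -- so
   the ORDERED_EXPR ends up outermost and a NaN argument yields FP_NAN
   before any of the magnitude comparisons matter.  */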
9744 /* Fold a call to an unordered comparison function such as
9745 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9746 being called and ARG0 and ARG1 are the arguments for the call.
9747 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9748 the opposite of the desired result. UNORDERED_CODE is used
9749 for modes that can hold NaNs and ORDERED_CODE is used for
9750 the rest. */
9752 static tree
9753 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9754 enum tree_code unordered_code,
9755 enum tree_code ordered_code)
9757 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9758 enum tree_code code;
9759 tree type0, type1;
9760 enum tree_code code0, code1;
9761 tree cmp_type = NULL_TREE;
9763 type0 = TREE_TYPE (arg0);
9764 type1 = TREE_TYPE (arg1);
9766 code0 = TREE_CODE (type0);
9767 code1 = TREE_CODE (type1);
9769 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9770 /* Choose the wider of two real types. */
9771 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9772 ? type0 : type1;
9773 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9774 cmp_type = type0;
9775 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9776 cmp_type = type1;
9778 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9779 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9781 if (unordered_code == UNORDERED_EXPR)
9783 if (!HONOR_NANS (arg0))
9784 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9785 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9788 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9789 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9790 fold_build2_loc (loc, code, type, arg0, arg1));
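/* Illustrative example: isgreater (x, y) folds to !(x unle y), built as a
   TRUTH_NOT_EXPR of UNLE_EXPR so no invalid-operand exception is raised
   on quiet NaNs; when NaNs cannot occur, the plain !(x <= y) form is
   used instead.  */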
9793 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9794 arithmetic if it can never overflow, or into internal functions that
9795 return both the result of the arithmetic and an overflow flag in
9796 a complex integer result, or some other check for overflow. */
9798 static tree
9799 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9800 tree arg0, tree arg1, tree arg2)
9802 enum internal_fn ifn = IFN_LAST;
9803 tree type = TREE_TYPE (TREE_TYPE (arg2));
9804 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9805 switch (fcode)
9807 case BUILT_IN_ADD_OVERFLOW:
9808 case BUILT_IN_SADD_OVERFLOW:
9809 case BUILT_IN_SADDL_OVERFLOW:
9810 case BUILT_IN_SADDLL_OVERFLOW:
9811 case BUILT_IN_UADD_OVERFLOW:
9812 case BUILT_IN_UADDL_OVERFLOW:
9813 case BUILT_IN_UADDLL_OVERFLOW:
9814 ifn = IFN_ADD_OVERFLOW;
9815 break;
9816 case BUILT_IN_SUB_OVERFLOW:
9817 case BUILT_IN_SSUB_OVERFLOW:
9818 case BUILT_IN_SSUBL_OVERFLOW:
9819 case BUILT_IN_SSUBLL_OVERFLOW:
9820 case BUILT_IN_USUB_OVERFLOW:
9821 case BUILT_IN_USUBL_OVERFLOW:
9822 case BUILT_IN_USUBLL_OVERFLOW:
9823 ifn = IFN_SUB_OVERFLOW;
9824 break;
9825 case BUILT_IN_MUL_OVERFLOW:
9826 case BUILT_IN_SMUL_OVERFLOW:
9827 case BUILT_IN_SMULL_OVERFLOW:
9828 case BUILT_IN_SMULLL_OVERFLOW:
9829 case BUILT_IN_UMUL_OVERFLOW:
9830 case BUILT_IN_UMULL_OVERFLOW:
9831 case BUILT_IN_UMULLL_OVERFLOW:
9832 ifn = IFN_MUL_OVERFLOW;
9833 break;
9834 default:
9835 gcc_unreachable ();
9837 tree ctype = build_complex_type (type);
9838 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9839 2, arg0, arg1);
9840 tree tgt = save_expr (call);
9841 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9842 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9843 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9844 tree store
9845 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9846 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
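/* Illustrative sketch of the result for __builtin_add_overflow (a, b, &r):

     tgt = IFN_ADD_OVERFLOW (a, b);      /+ complex pair {sum, flag} +/
     r = REALPART_EXPR <tgt>, (_Bool) IMAGPART_EXPR <tgt>

   i.e. the sum is stored through the pointer and the COMPOUND_EXPR yields
   the overflow flag as the value of the builtin.  */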
9849 /* Fold a call to built-in function FNDECL with 0 arguments.
9850 This function returns NULL_TREE if no simplification was possible. */
9852 static tree
9853 fold_builtin_0 (location_t loc, tree fndecl)
9855 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9856 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9857 switch (fcode)
9859 CASE_FLT_FN (BUILT_IN_INF):
9860 case BUILT_IN_INFD32:
9861 case BUILT_IN_INFD64:
9862 case BUILT_IN_INFD128:
9863 return fold_builtin_inf (loc, type, true);
9865 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9866 return fold_builtin_inf (loc, type, false);
9868 case BUILT_IN_CLASSIFY_TYPE:
9869 return fold_builtin_classify_type (NULL_TREE);
9871 default:
9872 break;
9874 return NULL_TREE;
9877 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9878 This function returns NULL_TREE if no simplification was possible. */
9880 static tree
9881 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9883 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9884 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9885 switch (fcode)
9887 case BUILT_IN_CONSTANT_P:
9889 tree val = fold_builtin_constant_p (arg0);
9891 /* Gimplification will pull the CALL_EXPR for the builtin out of
9892 an if condition. When not optimizing, we'll not CSE it back.
9893 To avoid link-error style regressions, return false now. */
9894 if (!val && !optimize)
9895 val = integer_zero_node;
9897 return val;
9900 case BUILT_IN_CLASSIFY_TYPE:
9901 return fold_builtin_classify_type (arg0);
9903 case BUILT_IN_STRLEN:
9904 return fold_builtin_strlen (loc, type, arg0);
9906 CASE_FLT_FN (BUILT_IN_FABS):
9907 case BUILT_IN_FABSD32:
9908 case BUILT_IN_FABSD64:
9909 case BUILT_IN_FABSD128:
9910 return fold_builtin_fabs (loc, arg0, type);
9912 case BUILT_IN_ABS:
9913 case BUILT_IN_LABS:
9914 case BUILT_IN_LLABS:
9915 case BUILT_IN_IMAXABS:
9916 return fold_builtin_abs (loc, arg0, type);
9918 CASE_FLT_FN (BUILT_IN_CONJ):
9919 if (validate_arg (arg0, COMPLEX_TYPE)
9920 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9921 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9922 break;
9924 CASE_FLT_FN (BUILT_IN_CREAL):
9925 if (validate_arg (arg0, COMPLEX_TYPE)
9926 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9927 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9928 break;
9930 CASE_FLT_FN (BUILT_IN_CIMAG):
9931 if (validate_arg (arg0, COMPLEX_TYPE)
9932 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9933 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9934 break;
9936 CASE_FLT_FN (BUILT_IN_CCOS):
9937 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9939 CASE_FLT_FN (BUILT_IN_CCOSH):
9940 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9942 CASE_FLT_FN (BUILT_IN_CPROJ):
9943 return fold_builtin_cproj (loc, arg0, type);
9945 CASE_FLT_FN (BUILT_IN_CSIN):
9946 if (validate_arg (arg0, COMPLEX_TYPE)
9947 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9948 return do_mpc_arg1 (arg0, type, mpc_sin);
9949 break;
9951 CASE_FLT_FN (BUILT_IN_CSINH):
9952 if (validate_arg (arg0, COMPLEX_TYPE)
9953 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9954 return do_mpc_arg1 (arg0, type, mpc_sinh);
9955 break;
9957 CASE_FLT_FN (BUILT_IN_CTAN):
9958 if (validate_arg (arg0, COMPLEX_TYPE)
9959 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9960 return do_mpc_arg1 (arg0, type, mpc_tan);
9961 break;
9963 CASE_FLT_FN (BUILT_IN_CTANH):
9964 if (validate_arg (arg0, COMPLEX_TYPE)
9965 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9966 return do_mpc_arg1 (arg0, type, mpc_tanh);
9967 break;
9969 CASE_FLT_FN (BUILT_IN_CLOG):
9970 if (validate_arg (arg0, COMPLEX_TYPE)
9971 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9972 return do_mpc_arg1 (arg0, type, mpc_log);
9973 break;
9975 CASE_FLT_FN (BUILT_IN_CSQRT):
9976 if (validate_arg (arg0, COMPLEX_TYPE)
9977 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9978 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9979 break;
9981 CASE_FLT_FN (BUILT_IN_CASIN):
9982 if (validate_arg (arg0, COMPLEX_TYPE)
9983 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9984 return do_mpc_arg1 (arg0, type, mpc_asin);
9985 break;
9987 CASE_FLT_FN (BUILT_IN_CACOS):
9988 if (validate_arg (arg0, COMPLEX_TYPE)
9989 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9990 return do_mpc_arg1 (arg0, type, mpc_acos);
9991 break;
9993 CASE_FLT_FN (BUILT_IN_CATAN):
9994 if (validate_arg (arg0, COMPLEX_TYPE)
9995 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9996 return do_mpc_arg1 (arg0, type, mpc_atan);
9997 break;
9999 CASE_FLT_FN (BUILT_IN_CASINH):
10000 if (validate_arg (arg0, COMPLEX_TYPE)
10001 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10002 return do_mpc_arg1 (arg0, type, mpc_asinh);
10003 break;
10005 CASE_FLT_FN (BUILT_IN_CACOSH):
10006 if (validate_arg (arg0, COMPLEX_TYPE)
10007 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10008 return do_mpc_arg1 (arg0, type, mpc_acosh);
10009 break;
10011 CASE_FLT_FN (BUILT_IN_CATANH):
10012 if (validate_arg (arg0, COMPLEX_TYPE)
10013 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10014 return do_mpc_arg1 (arg0, type, mpc_atanh);
10015 break;
10017 CASE_FLT_FN (BUILT_IN_CABS):
10018 return fold_builtin_cabs (loc, arg0, type, fndecl);
10020 CASE_FLT_FN (BUILT_IN_CARG):
10021 return fold_builtin_carg (loc, arg0, type);
10023 CASE_FLT_FN (BUILT_IN_SQRT):
10024 return fold_builtin_sqrt (loc, arg0, type);
10026 CASE_FLT_FN (BUILT_IN_CBRT):
10027 return fold_builtin_cbrt (loc, arg0, type);
10029 CASE_FLT_FN (BUILT_IN_ASIN):
10030 if (validate_arg (arg0, REAL_TYPE))
10031 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10032 &dconstm1, &dconst1, true);
10033 break;
10035 CASE_FLT_FN (BUILT_IN_ACOS):
10036 if (validate_arg (arg0, REAL_TYPE))
10037 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10038 &dconstm1, &dconst1, true);
10039 break;
10041 CASE_FLT_FN (BUILT_IN_ATAN):
10042 if (validate_arg (arg0, REAL_TYPE))
10043 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10044 break;
10046 CASE_FLT_FN (BUILT_IN_ASINH):
10047 if (validate_arg (arg0, REAL_TYPE))
10048 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10049 break;
10051 CASE_FLT_FN (BUILT_IN_ACOSH):
10052 if (validate_arg (arg0, REAL_TYPE))
10053 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10054 &dconst1, NULL, true);
10055 break;
10057 CASE_FLT_FN (BUILT_IN_ATANH):
10058 if (validate_arg (arg0, REAL_TYPE))
10059 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10060 &dconstm1, &dconst1, false);
10061 break;
10063 CASE_FLT_FN (BUILT_IN_SIN):
10064 if (validate_arg (arg0, REAL_TYPE))
10065 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10066 break;
10068 CASE_FLT_FN (BUILT_IN_COS):
10069 return fold_builtin_cos (loc, arg0, type, fndecl);
10071 CASE_FLT_FN (BUILT_IN_TAN):
10072 return fold_builtin_tan (arg0, type);
10074 CASE_FLT_FN (BUILT_IN_CEXP):
10075 return fold_builtin_cexp (loc, arg0, type);
10077 CASE_FLT_FN (BUILT_IN_CEXPI):
10078 if (validate_arg (arg0, REAL_TYPE))
10079 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10080 break;
10082 CASE_FLT_FN (BUILT_IN_SINH):
10083 if (validate_arg (arg0, REAL_TYPE))
10084 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10085 break;
10087 CASE_FLT_FN (BUILT_IN_COSH):
10088 return fold_builtin_cosh (loc, arg0, type, fndecl);
10090 CASE_FLT_FN (BUILT_IN_TANH):
10091 if (validate_arg (arg0, REAL_TYPE))
10092 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10093 break;
10095 CASE_FLT_FN (BUILT_IN_ERF):
10096 if (validate_arg (arg0, REAL_TYPE))
10097 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10098 break;
10100 CASE_FLT_FN (BUILT_IN_ERFC):
10101 if (validate_arg (arg0, REAL_TYPE))
10102 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10103 break;
10105 CASE_FLT_FN (BUILT_IN_TGAMMA):
10106 if (validate_arg (arg0, REAL_TYPE))
10107 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10108 break;
10110 CASE_FLT_FN (BUILT_IN_EXP):
10111 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10113 CASE_FLT_FN (BUILT_IN_EXP2):
10114 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10116 CASE_FLT_FN (BUILT_IN_EXP10):
10117 CASE_FLT_FN (BUILT_IN_POW10):
10118 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10120 CASE_FLT_FN (BUILT_IN_EXPM1):
10121 if (validate_arg (arg0, REAL_TYPE))
10122 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10123 break;
10125 CASE_FLT_FN (BUILT_IN_LOG):
10126 if (validate_arg (arg0, REAL_TYPE))
10127 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10128 break;
10130 CASE_FLT_FN (BUILT_IN_LOG2):
10131 if (validate_arg (arg0, REAL_TYPE))
10132 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10133 break;
10135 CASE_FLT_FN (BUILT_IN_LOG10):
10136 if (validate_arg (arg0, REAL_TYPE))
10137 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10138 break;
10140 CASE_FLT_FN (BUILT_IN_LOG1P):
10141 if (validate_arg (arg0, REAL_TYPE))
10142 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10143 &dconstm1, NULL, false);
10144 break;
10146 CASE_FLT_FN (BUILT_IN_J0):
10147 if (validate_arg (arg0, REAL_TYPE))
10148 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10149 NULL, NULL, 0);
10150 break;
10152 CASE_FLT_FN (BUILT_IN_J1):
10153 if (validate_arg (arg0, REAL_TYPE))
10154 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10155 NULL, NULL, 0);
10156 break;
10158 CASE_FLT_FN (BUILT_IN_Y0):
10159 if (validate_arg (arg0, REAL_TYPE))
10160 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10161 &dconst0, NULL, false);
10162 break;
10164 CASE_FLT_FN (BUILT_IN_Y1):
10165 if (validate_arg (arg0, REAL_TYPE))
10166 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10167 &dconst0, NULL, false);
10168 break;
10170 CASE_FLT_FN (BUILT_IN_NAN):
10171 case BUILT_IN_NAND32:
10172 case BUILT_IN_NAND64:
10173 case BUILT_IN_NAND128:
10174 return fold_builtin_nan (arg0, type, true);
10176 CASE_FLT_FN (BUILT_IN_NANS):
10177 return fold_builtin_nan (arg0, type, false);
10179 CASE_FLT_FN (BUILT_IN_FLOOR):
10180 return fold_builtin_floor (loc, fndecl, arg0);
10182 CASE_FLT_FN (BUILT_IN_CEIL):
10183 return fold_builtin_ceil (loc, fndecl, arg0);
10185 CASE_FLT_FN (BUILT_IN_TRUNC):
10186 return fold_builtin_trunc (loc, fndecl, arg0);
10188 CASE_FLT_FN (BUILT_IN_ROUND):
10189 return fold_builtin_round (loc, fndecl, arg0);
10191 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10192 CASE_FLT_FN (BUILT_IN_RINT):
10193 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10195 CASE_FLT_FN (BUILT_IN_ICEIL):
10196 CASE_FLT_FN (BUILT_IN_LCEIL):
10197 CASE_FLT_FN (BUILT_IN_LLCEIL):
10198 CASE_FLT_FN (BUILT_IN_LFLOOR):
10199 CASE_FLT_FN (BUILT_IN_IFLOOR):
10200 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10201 CASE_FLT_FN (BUILT_IN_IROUND):
10202 CASE_FLT_FN (BUILT_IN_LROUND):
10203 CASE_FLT_FN (BUILT_IN_LLROUND):
10204 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10206 CASE_FLT_FN (BUILT_IN_IRINT):
10207 CASE_FLT_FN (BUILT_IN_LRINT):
10208 CASE_FLT_FN (BUILT_IN_LLRINT):
10209 return fold_fixed_mathfn (loc, fndecl, arg0);
10211 case BUILT_IN_BSWAP16:
10212 case BUILT_IN_BSWAP32:
10213 case BUILT_IN_BSWAP64:
10214 return fold_builtin_bswap (fndecl, arg0);
10216 CASE_INT_FN (BUILT_IN_FFS):
10217 CASE_INT_FN (BUILT_IN_CLZ):
10218 CASE_INT_FN (BUILT_IN_CTZ):
10219 CASE_INT_FN (BUILT_IN_CLRSB):
10220 CASE_INT_FN (BUILT_IN_POPCOUNT):
10221 CASE_INT_FN (BUILT_IN_PARITY):
10222 return fold_builtin_bitop (fndecl, arg0);
10224 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10225 return fold_builtin_signbit (loc, arg0, type);
10227 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10228 return fold_builtin_significand (loc, arg0, type);
10230 CASE_FLT_FN (BUILT_IN_ILOGB):
10231 CASE_FLT_FN (BUILT_IN_LOGB):
10232 return fold_builtin_logb (loc, arg0, type);
10234 case BUILT_IN_ISASCII:
10235 return fold_builtin_isascii (loc, arg0);
10237 case BUILT_IN_TOASCII:
10238 return fold_builtin_toascii (loc, arg0);
10240 case BUILT_IN_ISDIGIT:
10241 return fold_builtin_isdigit (loc, arg0);
10243 CASE_FLT_FN (BUILT_IN_FINITE):
10244 case BUILT_IN_FINITED32:
10245 case BUILT_IN_FINITED64:
10246 case BUILT_IN_FINITED128:
10247 case BUILT_IN_ISFINITE:
10249 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10250 if (ret)
10251 return ret;
10252 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10255 CASE_FLT_FN (BUILT_IN_ISINF):
10256 case BUILT_IN_ISINFD32:
10257 case BUILT_IN_ISINFD64:
10258 case BUILT_IN_ISINFD128:
10260 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10261 if (ret)
10262 return ret;
10263 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10266 case BUILT_IN_ISNORMAL:
10267 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10269 case BUILT_IN_ISINF_SIGN:
10270 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10272 CASE_FLT_FN (BUILT_IN_ISNAN):
10273 case BUILT_IN_ISNAND32:
10274 case BUILT_IN_ISNAND64:
10275 case BUILT_IN_ISNAND128:
10276 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10278 case BUILT_IN_FREE:
10279 if (integer_zerop (arg0))
10280 return build_empty_stmt (loc);
10281 break;
10283 default:
10284 break;
10287 return NULL_TREE;
10291 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10292 This function returns NULL_TREE if no simplification was possible. */
10294 static tree
10295 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10297 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10298 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10300 switch (fcode)
10302 CASE_FLT_FN (BUILT_IN_JN):
10303 if (validate_arg (arg0, INTEGER_TYPE)
10304 && validate_arg (arg1, REAL_TYPE))
10305 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10306 break;
10308 CASE_FLT_FN (BUILT_IN_YN):
10309 if (validate_arg (arg0, INTEGER_TYPE)
10310 && validate_arg (arg1, REAL_TYPE))
10311 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10312 &dconst0, false);
10313 break;
10315 CASE_FLT_FN (BUILT_IN_DREM):
10316 CASE_FLT_FN (BUILT_IN_REMAINDER):
10317 if (validate_arg (arg0, REAL_TYPE)
10318 && validate_arg (arg1, REAL_TYPE))
10319 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10320 break;
10322 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10323 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10324 if (validate_arg (arg0, REAL_TYPE)
10325 && validate_arg (arg1, POINTER_TYPE))
10326 return do_mpfr_lgamma_r (arg0, arg1, type);
10327 break;
10329 CASE_FLT_FN (BUILT_IN_ATAN2):
10330 if (validate_arg (arg0, REAL_TYPE)
10331 && validate_arg (arg1, REAL_TYPE))
10332 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10333 break;
10335 CASE_FLT_FN (BUILT_IN_FDIM):
10336 if (validate_arg (arg0, REAL_TYPE)
10337 && validate_arg (arg1, REAL_TYPE))
10338 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10339 break;
10341 CASE_FLT_FN (BUILT_IN_HYPOT):
10342 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10344 CASE_FLT_FN (BUILT_IN_CPOW):
10345 if (validate_arg (arg0, COMPLEX_TYPE)
10346 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10347 && validate_arg (arg1, COMPLEX_TYPE)
10348 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10349 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10350 break;
10352 CASE_FLT_FN (BUILT_IN_LDEXP):
10353 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10354 CASE_FLT_FN (BUILT_IN_SCALBN):
10355 CASE_FLT_FN (BUILT_IN_SCALBLN):
10356 return fold_builtin_load_exponent (loc, arg0, arg1,
10357 type, /*ldexp=*/false);
10359 CASE_FLT_FN (BUILT_IN_FREXP):
10360 return fold_builtin_frexp (loc, arg0, arg1, type);
10362 CASE_FLT_FN (BUILT_IN_MODF):
10363 return fold_builtin_modf (loc, arg0, arg1, type);
10365 case BUILT_IN_STRSTR:
10366 return fold_builtin_strstr (loc, arg0, arg1, type);
10368 case BUILT_IN_STRSPN:
10369 return fold_builtin_strspn (loc, arg0, arg1);
10371 case BUILT_IN_STRCSPN:
10372 return fold_builtin_strcspn (loc, arg0, arg1);
10374 case BUILT_IN_STRCHR:
10375 case BUILT_IN_INDEX:
10376 return fold_builtin_strchr (loc, arg0, arg1, type);
10378 case BUILT_IN_STRRCHR:
10379 case BUILT_IN_RINDEX:
10380 return fold_builtin_strrchr (loc, arg0, arg1, type);
10382 case BUILT_IN_STRCMP:
10383 return fold_builtin_strcmp (loc, arg0, arg1);
10385 case BUILT_IN_STRPBRK:
10386 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10388 case BUILT_IN_EXPECT:
10389 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10391 CASE_FLT_FN (BUILT_IN_POW):
10392 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10394 CASE_FLT_FN (BUILT_IN_POWI):
10395 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10397 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10398 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10400 CASE_FLT_FN (BUILT_IN_FMIN):
10401 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10403 CASE_FLT_FN (BUILT_IN_FMAX):
10404 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10406 case BUILT_IN_ISGREATER:
10407 return fold_builtin_unordered_cmp (loc, fndecl,
10408 arg0, arg1, UNLE_EXPR, LE_EXPR);
10409 case BUILT_IN_ISGREATEREQUAL:
10410 return fold_builtin_unordered_cmp (loc, fndecl,
10411 arg0, arg1, UNLT_EXPR, LT_EXPR);
10412 case BUILT_IN_ISLESS:
10413 return fold_builtin_unordered_cmp (loc, fndecl,
10414 arg0, arg1, UNGE_EXPR, GE_EXPR);
10415 case BUILT_IN_ISLESSEQUAL:
10416 return fold_builtin_unordered_cmp (loc, fndecl,
10417 arg0, arg1, UNGT_EXPR, GT_EXPR);
10418 case BUILT_IN_ISLESSGREATER:
10419 return fold_builtin_unordered_cmp (loc, fndecl,
10420 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10421 case BUILT_IN_ISUNORDERED:
10422 return fold_builtin_unordered_cmp (loc, fndecl,
10423 arg0, arg1, UNORDERED_EXPR,
10424 NOP_EXPR);
10426 /* We do the folding for va_start in the expander. */
10427 case BUILT_IN_VA_START:
10428 break;
10430 case BUILT_IN_OBJECT_SIZE:
10431 return fold_builtin_object_size (arg0, arg1);
10433 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10434 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10436 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10437 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10439 default:
10440 break;
10442 return NULL_TREE;
10445 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10446 and ARG2.
10447 This function returns NULL_TREE if no simplification was possible. */
10449 static tree
10450 fold_builtin_3 (location_t loc, tree fndecl,
10451 tree arg0, tree arg1, tree arg2)
10453 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10454 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10455 switch (fcode)
10458 CASE_FLT_FN (BUILT_IN_SINCOS):
10459 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10461 CASE_FLT_FN (BUILT_IN_FMA):
10462 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10463 break;
10465 CASE_FLT_FN (BUILT_IN_REMQUO):
10466 if (validate_arg (arg0, REAL_TYPE)
10467 && validate_arg (arg1, REAL_TYPE)
10468 && validate_arg (arg2, POINTER_TYPE))
10469 return do_mpfr_remquo (arg0, arg1, arg2);
10470 break;
10472 case BUILT_IN_STRNCMP:
10473 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10475 case BUILT_IN_MEMCHR:
10476 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10478 case BUILT_IN_BCMP:
10479 case BUILT_IN_MEMCMP:
10480 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10482 case BUILT_IN_EXPECT:
10483 return fold_builtin_expect (loc, arg0, arg1, arg2);
10485 case BUILT_IN_ADD_OVERFLOW:
10486 case BUILT_IN_SUB_OVERFLOW:
10487 case BUILT_IN_MUL_OVERFLOW:
10488 case BUILT_IN_SADD_OVERFLOW:
10489 case BUILT_IN_SADDL_OVERFLOW:
10490 case BUILT_IN_SADDLL_OVERFLOW:
10491 case BUILT_IN_SSUB_OVERFLOW:
10492 case BUILT_IN_SSUBL_OVERFLOW:
10493 case BUILT_IN_SSUBLL_OVERFLOW:
10494 case BUILT_IN_SMUL_OVERFLOW:
10495 case BUILT_IN_SMULL_OVERFLOW:
10496 case BUILT_IN_SMULLL_OVERFLOW:
10497 case BUILT_IN_UADD_OVERFLOW:
10498 case BUILT_IN_UADDL_OVERFLOW:
10499 case BUILT_IN_UADDLL_OVERFLOW:
10500 case BUILT_IN_USUB_OVERFLOW:
10501 case BUILT_IN_USUBL_OVERFLOW:
10502 case BUILT_IN_USUBLL_OVERFLOW:
10503 case BUILT_IN_UMUL_OVERFLOW:
10504 case BUILT_IN_UMULL_OVERFLOW:
10505 case BUILT_IN_UMULLL_OVERFLOW:
10506 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10508 default:
10509 break;
10511 return NULL_TREE;
10514 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10515 arguments. The trailing bool parameter (whether the result of the
10516 call is ignored) is currently unused. This function returns NULL_TREE if no
10517 simplification was possible. */
10519 tree
10520 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10522 tree ret = NULL_TREE;
10524 switch (nargs)
10526 case 0:
10527 ret = fold_builtin_0 (loc, fndecl);
10528 break;
10529 case 1:
10530 ret = fold_builtin_1 (loc, fndecl, args[0]);
10531 break;
10532 case 2:
10533 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10534 break;
10535 case 3:
10536 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10537 break;
10538 default:
10539 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10540 break;
10542 if (ret)
10544 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10545 SET_EXPR_LOCATION (ret, loc);
10546 TREE_NO_WARNING (ret) = 1;
10547 return ret;
10549 return NULL_TREE;
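/* Note on the wrapper above: the NOP_EXPR carrying TREE_NO_WARNING keeps
   the folded replacement from triggering "statement with no effect" style
   diagnostics once the original call node is gone.  */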
10552 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10553 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10554 of arguments in ARGS to be omitted. OLDNARGS is the number of
10555 elements in ARGS. */
10557 static tree
10558 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10559 int skip, tree fndecl, int n, va_list newargs)
10561 int nargs = oldnargs - skip + n;
10562 tree *buffer;
10564 if (n > 0)
10566 int i, j;
10568 buffer = XALLOCAVEC (tree, nargs);
10569 for (i = 0; i < n; i++)
10570 buffer[i] = va_arg (newargs, tree);
10571 for (j = skip; j < oldnargs; j++, i++)
10572 buffer[i] = args[j];
10574 else
10575 buffer = args + skip;
10577 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10580 /* Return true if FNDECL shouldn't be folded right now.
10581 If a built-in function has an inline attribute always_inline
10582 wrapper, defer folding it until after always_inline functions have
10583 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10584 might not be performed. */
10586 bool
10587 avoid_folding_inline_builtin (tree fndecl)
10589 return (DECL_DECLARED_INLINE_P (fndecl)
10590 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10591 && cfun
10592 && !cfun->always_inline_functions_inlined
10593 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10596 /* A wrapper function for builtin folding that prevents warnings for
10597 "statement without effect" and the like, caused by removing the
10598 call node earlier than the warning is generated. */
10600 tree
10601 fold_call_expr (location_t loc, tree exp, bool ignore)
10603 tree ret = NULL_TREE;
10604 tree fndecl = get_callee_fndecl (exp);
10605 if (fndecl
10606 && TREE_CODE (fndecl) == FUNCTION_DECL
10607 && DECL_BUILT_IN (fndecl)
10608 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10609 yet. Defer folding until we see all the arguments
10610 (after inlining). */
10611 && !CALL_EXPR_VA_ARG_PACK (exp))
10613 int nargs = call_expr_nargs (exp);
10615 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10616 instead the last argument is __builtin_va_arg_pack (). Defer folding
10617 even in that case, until arguments are finalized. */
10618 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10620 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10621 if (fndecl2
10622 && TREE_CODE (fndecl2) == FUNCTION_DECL
10623 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10624 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10625 return NULL_TREE;
10628 if (avoid_folding_inline_builtin (fndecl))
10629 return NULL_TREE;
10631 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10632 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10633 CALL_EXPR_ARGP (exp), ignore);
10634 else
10636 tree *args = CALL_EXPR_ARGP (exp);
10637 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10638 if (ret)
10639 return ret;
10642 return NULL_TREE;
10645 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10646 N arguments are passed in the array ARGARRAY. Return a folded
10647 expression or NULL_TREE if no simplification was possible. */
10649 tree
10650 fold_builtin_call_array (location_t loc, tree,
10651 tree fn,
10652 int n,
10653 tree *argarray)
10655 if (TREE_CODE (fn) != ADDR_EXPR)
10656 return NULL_TREE;
10658 tree fndecl = TREE_OPERAND (fn, 0);
10659 if (TREE_CODE (fndecl) == FUNCTION_DECL
10660 && DECL_BUILT_IN (fndecl))
10662 /* If last argument is __builtin_va_arg_pack (), arguments to this
10663 function are not finalized yet. Defer folding until they are. */
10664 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10666 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10667 if (fndecl2
10668 && TREE_CODE (fndecl2) == FUNCTION_DECL
10669 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10670 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10671 return NULL_TREE;
10673 if (avoid_folding_inline_builtin (fndecl))
10674 return NULL_TREE;
10675 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10676 return targetm.fold_builtin (fndecl, n, argarray, false);
10677 else
10678 return fold_builtin_n (loc, fndecl, argarray, n, false);
10681 return NULL_TREE;
10684 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10685 along with N new arguments specified as the "..." parameters. SKIP
10686 is the number of arguments in EXP to be omitted. This function is used
10687 to do varargs-to-varargs transformations. */
10689 static tree
10690 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10692 va_list ap;
10693 tree t;
10695 va_start (ap, n);
10696 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10697 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10698 va_end (ap);
10700 return t;
10703 /* Validate a single argument ARG against a tree code CODE representing
10704 a type. */
10706 static bool
10707 validate_arg (const_tree arg, enum tree_code code)
10709 if (!arg)
10710 return false;
10711 else if (code == POINTER_TYPE)
10712 return POINTER_TYPE_P (TREE_TYPE (arg));
10713 else if (code == INTEGER_TYPE)
10714 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10715 return code == TREE_CODE (TREE_TYPE (arg));
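/* Note on the matching above: POINTER_TYPE and INTEGER_TYPE are matched
   loosely -- any pointer type and any integral type (including enums and
   booleans) are accepted -- while every other code must match the
   argument's type code exactly.  */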
10718 /* This function validates the types of a function call argument list
10719 against a specified list of tree_codes. If the last specifier is a 0,
10720 that represents an ellipsis, otherwise the last specifier must be a
10721 VOID_TYPE.
10723 This is the GIMPLE version of validate_arglist. Eventually we want to
10724 completely convert builtins.c to work from GIMPLEs and the tree based
10725 validate_arglist will then be removed. */
10727 bool
10728 validate_gimple_arglist (const gcall *call, ...)
10730 enum tree_code code;
10731 bool res = 0;
10732 va_list ap;
10733 const_tree arg;
10734 size_t i;
10736 va_start (ap, call);
10737 i = 0;
10739 do
10741 code = (enum tree_code) va_arg (ap, int);
10742 switch (code)
10744 case 0:
10745 /* This signifies an ellipsis; any further arguments are all ok. */
10746 res = true;
10747 goto end;
10748 case VOID_TYPE:
10749 /* This signifies an endlink; if no arguments remain, return
10750 true, otherwise return false. */
10751 res = (i == gimple_call_num_args (call));
10752 goto end;
10753 default:
10754 /* If no parameters remain or the parameter's code does not
10755 match the specified code, return false. Otherwise continue
10756 checking any remaining arguments. */
10757 arg = gimple_call_arg (call, i++);
10758 if (!validate_arg (arg, code))
10759 goto end;
10760 break;
10763 while (1);
10765 /* We need gotos here since we can only have one VA_CLOSE in a
10766 function. */
10767 end: ;
10768 va_end (ap);
10770 return res;
10773 /* Default target-specific builtin expander that does nothing. */
10775 rtx
10776 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10777 rtx target ATTRIBUTE_UNUSED,
10778 rtx subtarget ATTRIBUTE_UNUSED,
10779 machine_mode mode ATTRIBUTE_UNUSED,
10780 int ignore ATTRIBUTE_UNUSED)
10782 return NULL_RTX;
10785 /* Returns true if EXP represents data that would potentially reside
10786 in a readonly section. */
10788 bool
10789 readonly_data_expr (tree exp)
10791 STRIP_NOPS (exp);
10793 if (TREE_CODE (exp) != ADDR_EXPR)
10794 return false;
10796 exp = get_base_address (TREE_OPERAND (exp, 0));
10797 if (!exp)
10798 return false;
10800 /* Make sure we call decl_readonly_section only for trees it
10801 can handle (since it returns true for everything it doesn't
10802 understand). */
10803 if (TREE_CODE (exp) == STRING_CST
10804 || TREE_CODE (exp) == CONSTRUCTOR
10805 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10806 return decl_readonly_section (exp, 0);
10807 else
10808 return false;
10811 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10812 to the call, and TYPE is its return type.
10814 Return NULL_TREE if no simplification was possible, otherwise return the
10815 simplified form of the call as a tree.
10817 The simplified form may be a constant or other expression which
10818 computes the same value, but in a more efficient manner (including
10819 calls to other builtin functions).
10821 The call may contain arguments which need to be evaluated, but
10822 which are not useful to determine the result of the call. In
10823 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10824 COMPOUND_EXPR will be an argument which must be evaluated.
10825 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10826 COMPOUND_EXPR in the chain will contain the tree for the simplified
10827 form of the builtin function call. */
10829 static tree
10830 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10832 if (!validate_arg (s1, POINTER_TYPE)
10833 || !validate_arg (s2, POINTER_TYPE))
10834 return NULL_TREE;
10835 else
10837 tree fn;
10838 const char *p1, *p2;
10840 p2 = c_getstr (s2);
10841 if (p2 == NULL)
10842 return NULL_TREE;
10844 p1 = c_getstr (s1);
10845 if (p1 != NULL)
10847 const char *r = strstr (p1, p2);
10848 tree tem;
10850 if (r == NULL)
10851 return build_int_cst (TREE_TYPE (s1), 0);
10853 /* Return an offset into the constant string argument. */
10854 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10855 return fold_convert_loc (loc, type, tem);
10858 /* The argument is const char *, and the result is char *, so we need
10859 a type conversion here to avoid a warning. */
10860 if (p2[0] == '\0')
10861 return fold_convert_loc (loc, type, s1);
10863 if (p2[1] != '\0')
10864 return NULL_TREE;
10866 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10867 if (!fn)
10868 return NULL_TREE;
10870 /* New argument list transforming strstr(s1, s2) to
10871 strchr(s1, s2[0]). */
10872 return build_call_expr_loc (loc, fn, 2, s1,
10873 build_int_cst (integer_type_node, p2[0]));
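/* Illustrative examples: strstr ("hello", "lo") folds to the constant
   offset s1 + 3; strstr (s1, "") folds to s1; and a one-character needle
   such as strstr (s1, "a") is rewritten as strchr (s1, 'a').  */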
10877 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10878 the call, and TYPE is its return type.
10880 Return NULL_TREE if no simplification was possible, otherwise return the
10881 simplified form of the call as a tree.
10883 The simplified form may be a constant or other expression which
10884 computes the same value, but in a more efficient manner (including
10885 calls to other builtin functions).
10887 The call may contain arguments which need to be evaluated, but
10888 which are not useful to determine the result of the call. In
10889 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10890 COMPOUND_EXPR will be an argument which must be evaluated.
10891 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10892 COMPOUND_EXPR in the chain will contain the tree for the simplified
10893 form of the builtin function call. */
10895 static tree
10896 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10898 if (!validate_arg (s1, POINTER_TYPE)
10899 || !validate_arg (s2, INTEGER_TYPE))
10900 return NULL_TREE;
10901 else
10903 const char *p1;
10905 if (TREE_CODE (s2) != INTEGER_CST)
10906 return NULL_TREE;
10908 p1 = c_getstr (s1);
10909 if (p1 != NULL)
10911 char c;
10912 const char *r;
10913 tree tem;
10915 if (target_char_cast (s2, &c))
10916 return NULL_TREE;
10918 r = strchr (p1, c);
10920 if (r == NULL)
10921 return build_int_cst (TREE_TYPE (s1), 0);
10923 /* Return an offset into the constant string argument. */
10924 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10925 return fold_convert_loc (loc, type, tem);
10927 return NULL_TREE;
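/* [Editorial sketch -- illustrative, not part of builtins.c.]  What the
   constant folding above computes, expressed at the source level:
   strchr on a string literal becomes a constant offset into it, or a
   null pointer when the character does not occur.  */
#include <assert.h>
#include <stddef.h>
#include <string.h>

static void
demo_strchr_fold (void)
{
  static const char p[] = "hello";
  assert (strchr (p, 'l') == p + 2);
  assert (strchr (p, 'z') == NULL);
}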
10931 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10932 the call, and TYPE is its return type.
10934 Return NULL_TREE if no simplification was possible, otherwise return the
10935 simplified form of the call as a tree.
10937 The simplified form may be a constant or other expression which
10938 computes the same value, but in a more efficient manner (including
10939 calls to other builtin functions).
10941 The call may contain arguments which need to be evaluated, but
10942 which are not useful to determine the result of the call. In
10943 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10944 COMPOUND_EXPR will be an argument which must be evaluated.
10945 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10946 COMPOUND_EXPR in the chain will contain the tree for the simplified
10947 form of the builtin function call. */
10949 static tree
10950 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10952 if (!validate_arg (s1, POINTER_TYPE)
10953 || !validate_arg (s2, INTEGER_TYPE))
10954 return NULL_TREE;
10955 else
10957 tree fn;
10958 const char *p1;
10960 if (TREE_CODE (s2) != INTEGER_CST)
10961 return NULL_TREE;
10963 p1 = c_getstr (s1);
10964 if (p1 != NULL)
10966 char c;
10967 const char *r;
10968 tree tem;
10970 if (target_char_cast (s2, &c))
10971 return NULL_TREE;
10973 r = strrchr (p1, c);
10975 if (r == NULL)
10976 return build_int_cst (TREE_TYPE (s1), 0);
10978 /* Return an offset into the constant string argument. */
10979 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10980 return fold_convert_loc (loc, type, tem);
10983 if (! integer_zerop (s2))
10984 return NULL_TREE;
10986 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10987 if (!fn)
10988 return NULL_TREE;
10990 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10991 return build_call_expr_loc (loc, fn, 2, s1, s2);
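/* [Editorial sketch -- illustrative, not part of builtins.c.]  The
   identity behind the strrchr('\0') transformation above: searching
   for the terminator from either end lands on s + strlen (s).  */
#include <assert.h>
#include <string.h>

static void
demo_strrchr_fold (const char *s)
{
  assert (strrchr (s, '\0') == strchr (s, '\0'));
  assert (strchr (s, '\0') == s + strlen (s));
}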
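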
10995 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10996 to the call, and TYPE is its return type.
10998 Return NULL_TREE if no simplification was possible, otherwise return the
10999 simplified form of the call as a tree.
11001 The simplified form may be a constant or other expression which
11002 computes the same value, but in a more efficient manner (including
11003 calls to other builtin functions).
11005 The call may contain arguments which need to be evaluated, but
11006 which are not useful to determine the result of the call. In
11007 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11008 COMPOUND_EXPR will be an argument which must be evaluated.
11009 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11010 COMPOUND_EXPR in the chain will contain the tree for the simplified
11011 form of the builtin function call. */
11013 static tree
11014 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11016 if (!validate_arg (s1, POINTER_TYPE)
11017 || !validate_arg (s2, POINTER_TYPE))
11018 return NULL_TREE;
11019 else
11021 tree fn;
11022 const char *p1, *p2;
11024 p2 = c_getstr (s2);
11025 if (p2 == NULL)
11026 return NULL_TREE;
11028 p1 = c_getstr (s1);
11029 if (p1 != NULL)
11031 const char *r = strpbrk (p1, p2);
11032 tree tem;
11034 if (r == NULL)
11035 return build_int_cst (TREE_TYPE (s1), 0);
11037 /* Return an offset into the constant string argument. */
11038 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11039 return fold_convert_loc (loc, type, tem);
11042 if (p2[0] == '\0')
11043 /* strpbrk(x, "") == NULL.
11044 Evaluate and ignore s1 in case it had side-effects. */
11045 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11047 if (p2[1] != '\0')
11048 return NULL_TREE; /* Really call strpbrk. */
11050 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11051 if (!fn)
11052 return NULL_TREE;
11054 /* New argument list transforming strpbrk(s1, s2) to
11055 strchr(s1, s2[0]). */
11056 return build_call_expr_loc (loc, fn, 2, s1,
11057 build_int_cst (integer_type_node, p2[0]));
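/* [Editorial sketch -- illustrative, not part of builtins.c.]  The two
   strpbrk identities used above: an empty accept set never matches,
   and a singleton set degenerates to strchr.  */
#include <assert.h>
#include <stddef.h>
#include <string.h>

static void
demo_strpbrk_fold (const char *s)
{
  assert (strpbrk (s, "") == NULL);
  assert (strpbrk (s, "a") == strchr (s, 'a'));
}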
11061 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11062 to the call.
11064 Return NULL_TREE if no simplification was possible, otherwise return the
11065 simplified form of the call as a tree.
11067 The simplified form may be a constant or other expression which
11068 computes the same value, but in a more efficient manner (including
11069 calls to other builtin functions).
11071 The call may contain arguments which need to be evaluated, but
11072 which are not useful to determine the result of the call. In
11073 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11074 COMPOUND_EXPR will be an argument which must be evaluated.
11075 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11076 COMPOUND_EXPR in the chain will contain the tree for the simplified
11077 form of the builtin function call. */
11079 static tree
11080 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11082 if (!validate_arg (s1, POINTER_TYPE)
11083 || !validate_arg (s2, POINTER_TYPE))
11084 return NULL_TREE;
11085 else
11087 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11089 /* If both arguments are constants, evaluate at compile-time. */
11090 if (p1 && p2)
11092 const size_t r = strspn (p1, p2);
11093 return build_int_cst (size_type_node, r);
11096 /* If either argument is "", the result is zero. */
11097 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11098 /* Evaluate and ignore both arguments in case either one has
11099 side-effects. */
11100 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11101 s1, s2);
11102 return NULL_TREE;
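/* [Editorial sketch -- illustrative, not part of builtins.c.]  The
   strspn special cases folded above: an empty string on either side
   yields a span of zero.  */
#include <assert.h>
#include <string.h>

static void
demo_strspn_fold (const char *s)
{
  assert (strspn ("", s) == 0);
  assert (strspn (s, "") == 0);
}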
11106 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11107 to the call.
11109 Return NULL_TREE if no simplification was possible, otherwise return the
11110 simplified form of the call as a tree.
11112 The simplified form may be a constant or other expression which
11113 computes the same value, but in a more efficient manner (including
11114 calls to other builtin functions).
11116 The call may contain arguments which need to be evaluated, but
11117 which are not useful to determine the result of the call. In
11118 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11119 COMPOUND_EXPR will be an argument which must be evaluated.
11120 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11121 COMPOUND_EXPR in the chain will contain the tree for the simplified
11122 form of the builtin function call. */
11124 static tree
11125 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11127 if (!validate_arg (s1, POINTER_TYPE)
11128 || !validate_arg (s2, POINTER_TYPE))
11129 return NULL_TREE;
11130 else
11132 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11134 /* If both arguments are constants, evaluate at compile-time. */
11135 if (p1 && p2)
11137 const size_t r = strcspn (p1, p2);
11138 return build_int_cst (size_type_node, r);
11141 /* If the first argument is "", the result is zero. */
11142 if (p1 && *p1 == '\0')
11144 /* Evaluate and ignore argument s2 in case it has
11145 side-effects. */
11146 return omit_one_operand_loc (loc, size_type_node,
11147 size_zero_node, s2);
11150 /* If the second argument is "", return __builtin_strlen(s1). */
11151 if (p2 && *p2 == '\0')
11153 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11155 /* If the replacement _DECL isn't initialized, don't do the
11156 transformation. */
11157 if (!fn)
11158 return NULL_TREE;
11160 return build_call_expr_loc (loc, fn, 1, s1);
11162 return NULL_TREE;
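/* [Editorial sketch -- illustrative, not part of builtins.c.]  The
   strcspn special cases folded above: an empty first argument yields
   zero, and an empty reject set yields strlen (s).  */
#include <assert.h>
#include <string.h>

static void
demo_strcspn_fold (const char *s)
{
  assert (strcspn ("", s) == 0);
  assert (strcspn (s, "") == strlen (s));
}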
11166 /* Fold the next_arg or va_start call EXP. Returns true if an error
11167 was produced, false otherwise. This is done so that we don't output
11168 the error or warning two or three times. */
11170 bool
11171 fold_builtin_next_arg (tree exp, bool va_start_p)
11173 tree fntype = TREE_TYPE (current_function_decl);
11174 int nargs = call_expr_nargs (exp);
11175 tree arg;
11176 /* There is a good chance the current input_location points inside the
11177 definition of the va_start macro (perhaps on the token for the
11178 builtin) in a system header, so warnings will not be emitted.
11179 Use the location in real source code. */
11180 source_location current_location =
11181 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11182 NULL);
11184 if (!stdarg_p (fntype))
11186 error ("%<va_start%> used in function with fixed args");
11187 return true;
11190 if (va_start_p)
11192 if (nargs != 2)
11194 error ("wrong number of arguments to function %<va_start%>");
11195 return true;
11197 arg = CALL_EXPR_ARG (exp, 1);
11199 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11200 once we have checked the arguments and, if needed, issued a warning. */
11201 else
11203 if (nargs == 0)
11205 /* Evidently an out-of-date version of <stdarg.h>; can't validate
11206 va_start's second argument, but can still work as intended. */
11207 warning_at (current_location,
11208 OPT_Wvarargs,
11209 "%<__builtin_next_arg%> called without an argument");
11210 return true;
11212 else if (nargs > 1)
11214 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11215 return true;
11217 arg = CALL_EXPR_ARG (exp, 0);
11220 if (TREE_CODE (arg) == SSA_NAME)
11221 arg = SSA_NAME_VAR (arg);
11223 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11224 or __builtin_next_arg (0) the first time we see it, after checking
11225 the arguments and if needed issuing a warning. */
11226 if (!integer_zerop (arg))
11228 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11230 /* Strip off all nops for the sake of the comparison. This
11231 is not quite the same as STRIP_NOPS. It does more.
11232 We must also strip off INDIRECT_EXPR for C++ reference
11233 parameters. */
11234 while (CONVERT_EXPR_P (arg)
11235 || TREE_CODE (arg) == INDIRECT_REF)
11236 arg = TREE_OPERAND (arg, 0);
11237 if (arg != last_parm)
11239 /* FIXME: Sometimes with the tree optimizers we can end up with
11240 something other than the last argument even though the user
11241 used the last argument. We just warn and set the arg to be
11242 the last argument so that we will not get wrong code because
11243 of it. */
11244 warning_at (current_location,
11245 OPT_Wvarargs,
11246 "second parameter of %<va_start%> not last named argument");
11249 /* Undefined by C99 7.15.1.4p4 (va_start):
11250 "If the parameter parmN is declared with the register storage
11251 class, with a function or array type, or with a type that is
11252 not compatible with the type that results after application of
11253 the default argument promotions, the behavior is undefined."
11255 else if (DECL_REGISTER (arg))
11257 warning_at (current_location,
11258 OPT_Wvarargs,
11259 "undefined behaviour when second parameter of "
11260 "%<va_start%> is declared with %<register%> storage");
11263 /* We want to verify the second parameter just once before the tree
11264 optimizers are run and then avoid keeping it in the tree,
11265 as otherwise we could warn even for correct code like:
11266 void foo (int i, ...)
11267 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11268 if (va_start_p)
11269 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11270 else
11271 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11273 return false;
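/* [Editorial sketch -- illustrative, not part of builtins.c.]  Source
   code that trips the -Wvarargs diagnostic issued above: the second
   argument of va_start is not the last named parameter.  */
#include <stdarg.h>

void
demo_bad_va_start (int a, int b, ...)
{
  va_list ap;
  (void) b;
  va_start (ap, a); /* warning: second parameter of 'va_start' not
                       last named argument  */
  va_end (ap);
}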
11277 /* Expand a call EXP to __builtin_object_size. */
11279 static rtx
11280 expand_builtin_object_size (tree exp)
11282 tree ost;
11283 int object_size_type;
11284 tree fndecl = get_callee_fndecl (exp);
11286 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11288 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11289 exp, fndecl);
11290 expand_builtin_trap ();
11291 return const0_rtx;
11294 ost = CALL_EXPR_ARG (exp, 1);
11295 STRIP_NOPS (ost);
11297 if (TREE_CODE (ost) != INTEGER_CST
11298 || tree_int_cst_sgn (ost) < 0
11299 || compare_tree_int (ost, 3) > 0)
11301 error ("%Klast argument of %D is not integer constant between 0 and 3",
11302 exp, fndecl);
11303 expand_builtin_trap ();
11304 return const0_rtx;
11307 object_size_type = tree_to_shwi (ost);
11309 return object_size_type < 2 ? constm1_rtx : const0_rtx;
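/* [Editorial sketch -- illustrative, not part of builtins.c.]  The
   defaults implemented above, seen from user code: for an unknown
   pointer, types 0 and 1 report the maximum (size_t)-1 while types 2
   and 3 report the minimum 0.  */
#include <stddef.h>

size_t
demo_object_size (void *p)
{
  char buf[16];
  size_t known = __builtin_object_size (buf, 0); /* 16  */
  size_t max = __builtin_object_size (p, 0);     /* (size_t) -1  */
  size_t min = __builtin_object_size (p, 2);     /* 0  */
  return known + max + min;
}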
11312 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11313 FCODE is the BUILT_IN_* to use.
11314 Return NULL_RTX if we failed; the caller should emit a normal call,
11315 otherwise try to get the result in TARGET, if convenient (and in
11316 mode MODE if that's convenient). */
11318 static rtx
11319 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11320 enum built_in_function fcode)
11322 tree dest, src, len, size;
11324 if (!validate_arglist (exp,
11325 POINTER_TYPE,
11326 fcode == BUILT_IN_MEMSET_CHK
11327 ? INTEGER_TYPE : POINTER_TYPE,
11328 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11329 return NULL_RTX;
11331 dest = CALL_EXPR_ARG (exp, 0);
11332 src = CALL_EXPR_ARG (exp, 1);
11333 len = CALL_EXPR_ARG (exp, 2);
11334 size = CALL_EXPR_ARG (exp, 3);
11336 if (! tree_fits_uhwi_p (size))
11337 return NULL_RTX;
11339 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11341 tree fn;
11343 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11345 warning_at (tree_nonartificial_location (exp),
11346 0, "%Kcall to %D will always overflow destination buffer",
11347 exp, get_callee_fndecl (exp));
11348 return NULL_RTX;
11351 fn = NULL_TREE;
11352 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11353 mem{cpy,pcpy,move,set} is available. */
11354 switch (fcode)
11356 case BUILT_IN_MEMCPY_CHK:
11357 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11358 break;
11359 case BUILT_IN_MEMPCPY_CHK:
11360 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11361 break;
11362 case BUILT_IN_MEMMOVE_CHK:
11363 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11364 break;
11365 case BUILT_IN_MEMSET_CHK:
11366 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11367 break;
11368 default:
11369 break;
11372 if (! fn)
11373 return NULL_RTX;
11375 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11376 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11377 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11378 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11380 else if (fcode == BUILT_IN_MEMSET_CHK)
11381 return NULL_RTX;
11382 else
11384 unsigned int dest_align = get_pointer_alignment (dest);
11386 /* If DEST is not a pointer type, call the normal function. */
11387 if (dest_align == 0)
11388 return NULL_RTX;
11390 /* If SRC and DEST are the same (and not volatile), do nothing. */
11391 if (operand_equal_p (src, dest, 0))
11393 tree expr;
11395 if (fcode != BUILT_IN_MEMPCPY_CHK)
11397 /* Evaluate and ignore LEN in case it has side-effects. */
11398 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11399 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11402 expr = fold_build_pointer_plus (dest, len);
11403 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11406 /* __memmove_chk special case. */
11407 if (fcode == BUILT_IN_MEMMOVE_CHK)
11409 unsigned int src_align = get_pointer_alignment (src);
11411 if (src_align == 0)
11412 return NULL_RTX;
11414 /* If src is categorized for a readonly section we can use
11415 normal __memcpy_chk. */
11416 if (readonly_data_expr (src))
11418 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11419 if (!fn)
11420 return NULL_RTX;
11421 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11422 dest, src, len, size);
11423 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11424 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11425 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11428 return NULL_RTX;
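/* [Editorial sketch -- illustrative, not part of builtins.c.]  Roughly
   how the checked entry point is reached: under _FORTIFY_SOURCE a
   memcpy call is rewritten along these lines, with the object size of
   the destination passed as the fourth argument.  */
#include <stddef.h>

void *
demo_memcpy_chk (void *dst, const void *src, size_t n)
{
  return __builtin___memcpy_chk (dst, src, n,
                                 __builtin_object_size (dst, 0));
}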
11432 /* Emit warning if a buffer overflow is detected at compile time. */
11434 static void
11435 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11437 int is_strlen = 0;
11438 tree len, size;
11439 location_t loc = tree_nonartificial_location (exp);
11441 switch (fcode)
11443 case BUILT_IN_STRCPY_CHK:
11444 case BUILT_IN_STPCPY_CHK:
11445 /* For __strcat_chk the warning will be emitted only if overflowing
11446 by at least strlen (dest) + 1 bytes. */
11447 case BUILT_IN_STRCAT_CHK:
11448 len = CALL_EXPR_ARG (exp, 1);
11449 size = CALL_EXPR_ARG (exp, 2);
11450 is_strlen = 1;
11451 break;
11452 case BUILT_IN_STRNCAT_CHK:
11453 case BUILT_IN_STRNCPY_CHK:
11454 case BUILT_IN_STPNCPY_CHK:
11455 len = CALL_EXPR_ARG (exp, 2);
11456 size = CALL_EXPR_ARG (exp, 3);
11457 break;
11458 case BUILT_IN_SNPRINTF_CHK:
11459 case BUILT_IN_VSNPRINTF_CHK:
11460 len = CALL_EXPR_ARG (exp, 1);
11461 size = CALL_EXPR_ARG (exp, 3);
11462 break;
11463 default:
11464 gcc_unreachable ();
11467 if (!len || !size)
11468 return;
11470 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11471 return;
11473 if (is_strlen)
11475 len = c_strlen (len, 1);
11476 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11477 return;
11479 else if (fcode == BUILT_IN_STRNCAT_CHK)
11481 tree src = CALL_EXPR_ARG (exp, 1);
11482 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11483 return;
11484 src = c_strlen (src, 1);
11485 if (! src || ! tree_fits_uhwi_p (src))
11487 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11488 exp, get_callee_fndecl (exp));
11489 return;
11491 else if (tree_int_cst_lt (src, size))
11492 return;
11494 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11495 return;
11497 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11498 exp, get_callee_fndecl (exp));
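/* [Editorial sketch -- illustrative, not part of builtins.c.]  Code
   that triggers the warning above once fortification turns the call
   into __strcpy_chk (demo_buf, "overflow", 4): strlen of the source
   is 8, which is not less than the destination size 4.  */
#include <string.h>

char demo_buf[4];

void
demo_strcpy_overflow (void)
{
  strcpy (demo_buf, "overflow"); /* "will always overflow" when
                                    fortified  */
}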
11501 /* Emit warning if a buffer overflow is detected at compile time
11502 in __sprintf_chk/__vsprintf_chk calls. */
11504 static void
11505 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11507 tree size, len, fmt;
11508 const char *fmt_str;
11509 int nargs = call_expr_nargs (exp);
11511 /* Verify the required arguments in the original call. */
11513 if (nargs < 4)
11514 return;
11515 size = CALL_EXPR_ARG (exp, 2);
11516 fmt = CALL_EXPR_ARG (exp, 3);
11518 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11519 return;
11521 /* Check whether the format is a literal string constant. */
11522 fmt_str = c_getstr (fmt);
11523 if (fmt_str == NULL)
11524 return;
11526 if (!init_target_chars ())
11527 return;
11529 /* If the format doesn't contain % args or %%, we know its size. */
11530 if (strchr (fmt_str, target_percent) == 0)
11531 len = build_int_cstu (size_type_node, strlen (fmt_str));
11532 /* If the format is "%s" and the first ... argument is a string
11533 literal, we know its size too. */
11534 else if (fcode == BUILT_IN_SPRINTF_CHK
11535 && strcmp (fmt_str, target_percent_s) == 0)
11537 tree arg;
11539 if (nargs < 5)
11540 return;
11541 arg = CALL_EXPR_ARG (exp, 4);
11542 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11543 return;
11545 len = c_strlen (arg, 1);
11546 if (!len || ! tree_fits_uhwi_p (len))
11547 return;
11549 else
11550 return;
11552 if (! tree_int_cst_lt (len, size))
11553 warning_at (tree_nonartificial_location (exp),
11554 0, "%Kcall to %D will always overflow destination buffer",
11555 exp, get_callee_fndecl (exp));
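/* [Editorial sketch -- illustrative, not part of builtins.c.]  The
   %%-free case checked above: with no conversions in the format, the
   output length is just strlen of the format, so a fortified build
   can prove the overflow at compile time.  */
#include <stdio.h>

char demo_sbuf[4];

void
demo_sprintf_overflow (void)
{
  sprintf (demo_sbuf, "hello"); /* needs 6 bytes, buffer holds 4  */
}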
11558 /* Emit warning if a free is called with address of a variable. */
11560 static void
11561 maybe_emit_free_warning (tree exp)
11563 tree arg = CALL_EXPR_ARG (exp, 0);
11565 STRIP_NOPS (arg);
11566 if (TREE_CODE (arg) != ADDR_EXPR)
11567 return;
11569 arg = get_base_address (TREE_OPERAND (arg, 0));
11570 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11571 return;
11573 if (SSA_VAR_P (arg))
11574 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11575 "%Kattempt to free a non-heap object %qD", exp, arg);
11576 else
11577 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11578 "%Kattempt to free a non-heap object", exp);
11581 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11582 if possible. */
11584 static tree
11585 fold_builtin_object_size (tree ptr, tree ost)
11587 unsigned HOST_WIDE_INT bytes;
11588 int object_size_type;
11590 if (!validate_arg (ptr, POINTER_TYPE)
11591 || !validate_arg (ost, INTEGER_TYPE))
11592 return NULL_TREE;
11594 STRIP_NOPS (ost);
11596 if (TREE_CODE (ost) != INTEGER_CST
11597 || tree_int_cst_sgn (ost) < 0
11598 || compare_tree_int (ost, 3) > 0)
11599 return NULL_TREE;
11601 object_size_type = tree_to_shwi (ost);
11603 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11604 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11605 and (size_t) 0 for types 2 and 3. */
11606 if (TREE_SIDE_EFFECTS (ptr))
11607 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11609 if (TREE_CODE (ptr) == ADDR_EXPR)
11611 bytes = compute_builtin_object_size (ptr, object_size_type);
11612 if (wi::fits_to_tree_p (bytes, size_type_node))
11613 return build_int_cstu (size_type_node, bytes);
11615 else if (TREE_CODE (ptr) == SSA_NAME)
11617 /* If object size is not known yet, delay folding until
11618 later. Maybe subsequent passes will help determine
11619 it. */
11620 bytes = compute_builtin_object_size (ptr, object_size_type);
11621 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11622 && wi::fits_to_tree_p (bytes, size_type_node))
11623 return build_int_cstu (size_type_node, bytes);
11626 return NULL_TREE;
11629 /* Builtins with folding operations that operate on "..." arguments
11630 need special handling; we need to store the arguments in a convenient
11631 data structure before attempting any folding. Fortunately there are
11632 only a few builtins that fall into this category. FNDECL is the
11633 function, ARGS is the array of its NARGS arguments. */
11635 static tree
11636 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11638 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11639 tree ret = NULL_TREE;
11641 switch (fcode)
11643 case BUILT_IN_FPCLASSIFY:
11644 ret = fold_builtin_fpclassify (loc, args, nargs);
11645 break;
11647 default:
11648 break;
11650 if (ret)
11652 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11653 SET_EXPR_LOCATION (ret, loc);
11654 TREE_NO_WARNING (ret) = 1;
11655 return ret;
11657 return NULL_TREE;
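/* [Editorial sketch -- illustrative, not part of builtins.c.]  The
   call shape handled above: C99's fpclassify macro expands to the
   variadic builtin, whose first five arguments supply the values to
   return for each class.  */
#include <math.h>

int
demo_fpclassify (double x)
{
  return __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                               FP_SUBNORMAL, FP_ZERO, x);
}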
11660 /* Initialize format string characters in the target charset. */
11662 bool
11663 init_target_chars (void)
11665 static bool init;
11666 if (!init)
11668 target_newline = lang_hooks.to_target_charset ('\n');
11669 target_percent = lang_hooks.to_target_charset ('%');
11670 target_c = lang_hooks.to_target_charset ('c');
11671 target_s = lang_hooks.to_target_charset ('s');
11672 if (target_newline == 0 || target_percent == 0 || target_c == 0
11673 || target_s == 0)
11674 return false;
11676 target_percent_c[0] = target_percent;
11677 target_percent_c[1] = target_c;
11678 target_percent_c[2] = '\0';
11680 target_percent_s[0] = target_percent;
11681 target_percent_s[1] = target_s;
11682 target_percent_s[2] = '\0';
11684 target_percent_s_newline[0] = target_percent;
11685 target_percent_s_newline[1] = target_s;
11686 target_percent_s_newline[2] = target_newline;
11687 target_percent_s_newline[3] = '\0';
11689 init = true;
11691 return true;
11694 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11695 and no overflow/underflow occurred. INEXACT is true if M was not
11696 exactly calculated. TYPE is the tree type for the result. This
11697 function assumes that you cleared the MPFR flags and then
11698 calculated M to see if anything subsequently set a flag prior to
11699 entering this function. Return NULL_TREE if any checks fail. */
11701 static tree
11702 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11704 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11705 overflow/underflow occurred. If -frounding-math, proceed iff the
11706 result of calling FUNC was exact. */
11707 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11708 && (!flag_rounding_math || !inexact))
11710 REAL_VALUE_TYPE rr;
11712 real_from_mpfr (&rr, m, type, GMP_RNDN);
11713 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11714 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11715 but the mpfr_t is not, then we underflowed in the
11716 conversion. */
11717 if (real_isfinite (&rr)
11718 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11720 REAL_VALUE_TYPE rmode;
11722 real_convert (&rmode, TYPE_MODE (type), &rr);
11723 /* Proceed iff the specified mode can hold the value. */
11724 if (real_identical (&rmode, &rr))
11725 return build_real (type, rmode);
11728 return NULL_TREE;
11731 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11732 number and no overflow/underflow occurred. INEXACT is true if M
11733 was not exactly calculated. TYPE is the tree type for the result.
11734 This function assumes that you cleared the MPFR flags and then
11735 calculated M to see if anything subsequently set a flag prior to
11736 entering this function. Return NULL_TREE if any checks fail; if
11737 FORCE_CONVERT is true, bypass the checks instead. */
11739 static tree
11740 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11742 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11743 overflow/underflow occurred. If -frounding-math, proceed iff the
11744 result of calling FUNC was exact. */
11745 if (force_convert
11746 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11747 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11748 && (!flag_rounding_math || !inexact)))
11750 REAL_VALUE_TYPE re, im;
11752 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11753 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11754 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11755 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11756 but the mpfr_t is not, then we underflowed in the
11757 conversion. */
11758 if (force_convert
11759 || (real_isfinite (&re) && real_isfinite (&im)
11760 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11761 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11763 REAL_VALUE_TYPE re_mode, im_mode;
11765 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11766 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11767 /* Proceed iff the specified mode can hold the value. */
11768 if (force_convert
11769 || (real_identical (&re_mode, &re)
11770 && real_identical (&im_mode, &im)))
11771 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11772 build_real (TREE_TYPE (type), im_mode));
11775 return NULL_TREE;
11778 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11779 FUNC on it and return the resulting value as a tree with type TYPE.
11780 If MIN and/or MAX are not NULL, then the supplied ARG must be
11781 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11782 acceptable values, otherwise they are not. The mpfr precision is
11783 set to the precision of TYPE. We assume that function FUNC returns
11784 zero if the result could be calculated exactly within the requested
11785 precision. */
11787 static tree
11788 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11789 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11790 bool inclusive)
11792 tree result = NULL_TREE;
11794 STRIP_NOPS (arg);
11796 /* To proceed, MPFR must exactly represent the target floating point
11797 format, which only happens when the target base equals two. */
11798 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11799 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11801 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11803 if (real_isfinite (ra)
11804 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
11805 && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
11807 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11808 const int prec = fmt->p;
11809 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11810 int inexact;
11811 mpfr_t m;
11813 mpfr_init2 (m, prec);
11814 mpfr_from_real (m, ra, GMP_RNDN);
11815 mpfr_clear_flags ();
11816 inexact = func (m, m, rnd);
11817 result = do_mpfr_ckconv (m, type, inexact);
11818 mpfr_clear (m);
11822 return result;
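/* [Editorial sketch -- illustrative, not part of builtins.c.]  The
   MPFR calling pattern used above, on the host, for a 53-bit (IEEE
   double) target format: set the working precision, clear the flags,
   compute, then test whether the result was exact.  */
#include <mpfr.h>
#include <stdio.h>

int
main (void)
{
  mpfr_t m;
  int inexact;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, 0.5, GMP_RNDN);
  mpfr_clear_flags ();
  inexact = mpfr_sin (m, m, GMP_RNDN);
  printf ("sin(0.5) = %.17g, inexact = %d\n",
          mpfr_get_d (m, GMP_RNDN), inexact);
  mpfr_clear (m);
  return 0;
}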
11825 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11826 FUNC on it and return the resulting value as a tree with type TYPE.
11827 The mpfr precision is set to the precision of TYPE. We assume that
11828 function FUNC returns zero if the result could be calculated
11829 exactly within the requested precision. */
11831 static tree
11832 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11833 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11835 tree result = NULL_TREE;
11837 STRIP_NOPS (arg1);
11838 STRIP_NOPS (arg2);
11840 /* To proceed, MPFR must exactly represent the target floating point
11841 format, which only happens when the target base equals two. */
11842 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11843 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11844 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11846 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11847 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11849 if (real_isfinite (ra1) && real_isfinite (ra2))
11851 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11852 const int prec = fmt->p;
11853 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11854 int inexact;
11855 mpfr_t m1, m2;
11857 mpfr_inits2 (prec, m1, m2, NULL);
11858 mpfr_from_real (m1, ra1, GMP_RNDN);
11859 mpfr_from_real (m2, ra2, GMP_RNDN);
11860 mpfr_clear_flags ();
11861 inexact = func (m1, m1, m2, rnd);
11862 result = do_mpfr_ckconv (m1, type, inexact);
11863 mpfr_clears (m1, m2, NULL);
11867 return result;
11870 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11871 FUNC on it and return the resulting value as a tree with type TYPE.
11872 The mpfr precision is set to the precision of TYPE. We assume that
11873 function FUNC returns zero if the result could be calculated
11874 exactly within the requested precision. */
11876 static tree
11877 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11878 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11880 tree result = NULL_TREE;
11882 STRIP_NOPS (arg1);
11883 STRIP_NOPS (arg2);
11884 STRIP_NOPS (arg3);
11886 /* To proceed, MPFR must exactly represent the target floating point
11887 format, which only happens when the target base equals two. */
11888 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11889 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11890 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11891 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11893 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11894 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11895 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11897 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11899 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11900 const int prec = fmt->p;
11901 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11902 int inexact;
11903 mpfr_t m1, m2, m3;
11905 mpfr_inits2 (prec, m1, m2, m3, NULL);
11906 mpfr_from_real (m1, ra1, GMP_RNDN);
11907 mpfr_from_real (m2, ra2, GMP_RNDN);
11908 mpfr_from_real (m3, ra3, GMP_RNDN);
11909 mpfr_clear_flags ();
11910 inexact = func (m1, m1, m2, m3, rnd);
11911 result = do_mpfr_ckconv (m1, type, inexact);
11912 mpfr_clears (m1, m2, m3, NULL);
11916 return result;
11919 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11920 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11921 If ARG_SINP and ARG_COSP are NULL then the result is returned
11922 as a complex value.
11923 The type is taken from the type of ARG and is used for setting the
11924 precision of the calculation and results. */
11926 static tree
11927 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11929 tree const type = TREE_TYPE (arg);
11930 tree result = NULL_TREE;
11932 STRIP_NOPS (arg);
11934 /* To proceed, MPFR must exactly represent the target floating point
11935 format, which only happens when the target base equals two. */
11936 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11937 && TREE_CODE (arg) == REAL_CST
11938 && !TREE_OVERFLOW (arg))
11940 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11942 if (real_isfinite (ra))
11944 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11945 const int prec = fmt->p;
11946 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11947 tree result_s, result_c;
11948 int inexact;
11949 mpfr_t m, ms, mc;
11951 mpfr_inits2 (prec, m, ms, mc, NULL);
11952 mpfr_from_real (m, ra, GMP_RNDN);
11953 mpfr_clear_flags ();
11954 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11955 result_s = do_mpfr_ckconv (ms, type, inexact);
11956 result_c = do_mpfr_ckconv (mc, type, inexact);
11957 mpfr_clears (m, ms, mc, NULL);
11958 if (result_s && result_c)
11960 /* If we are to return in a complex value do so. */
11961 if (!arg_sinp && !arg_cosp)
11962 return build_complex (build_complex_type (type),
11963 result_c, result_s);
11965 /* Dereference the sin/cos pointer arguments. */
11966 arg_sinp = build_fold_indirect_ref (arg_sinp);
11967 arg_cosp = build_fold_indirect_ref (arg_cosp);
11968 /* Proceed if valid pointer types were passed in. */
11969 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11970 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11972 /* Set the values. */
11973 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
11974 result_s);
11975 TREE_SIDE_EFFECTS (result_s) = 1;
11976 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
11977 result_c);
11978 TREE_SIDE_EFFECTS (result_c) = 1;
11979 /* Combine the assignments into a compound expr. */
11980 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11981 result_s, result_c));
11986 return result;
11989 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11990 two-argument mpfr order N Bessel function FUNC on them and return
11991 the resulting value as a tree with type TYPE. The mpfr precision
11992 is set to the precision of TYPE. We assume that function FUNC
11993 returns zero if the result could be calculated exactly within the
11994 requested precision. */
11995 static tree
11996 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
11997 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
11998 const REAL_VALUE_TYPE *min, bool inclusive)
12000 tree result = NULL_TREE;
12002 STRIP_NOPS (arg1);
12003 STRIP_NOPS (arg2);
12005 /* To proceed, MPFR must exactly represent the target floating point
12006 format, which only happens when the target base equals two. */
12007 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12008 && tree_fits_shwi_p (arg1)
12009 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12011 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12012 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12014 if (n == (long)n
12015 && real_isfinite (ra)
12016 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
12018 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12019 const int prec = fmt->p;
12020 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12021 int inexact;
12022 mpfr_t m;
12024 mpfr_init2 (m, prec);
12025 mpfr_from_real (m, ra, GMP_RNDN);
12026 mpfr_clear_flags ();
12027 inexact = func (m, n, m, rnd);
12028 result = do_mpfr_ckconv (m, type, inexact);
12029 mpfr_clear (m);
12033 return result;
12036 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12037 the pointer *(ARG_QUO) and return the result. The type is taken
12038 from the type of ARG0 and is used for setting the precision of the
12039 calculation and results. */
12041 static tree
12042 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12044 tree const type = TREE_TYPE (arg0);
12045 tree result = NULL_TREE;
12047 STRIP_NOPS (arg0);
12048 STRIP_NOPS (arg1);
12050 /* To proceed, MPFR must exactly represent the target floating point
12051 format, which only happens when the target base equals two. */
12052 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12053 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12054 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12056 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12057 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12059 if (real_isfinite (ra0) && real_isfinite (ra1))
12061 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12062 const int prec = fmt->p;
12063 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12064 tree result_rem;
12065 long integer_quo;
12066 mpfr_t m0, m1;
12068 mpfr_inits2 (prec, m0, m1, NULL);
12069 mpfr_from_real (m0, ra0, GMP_RNDN);
12070 mpfr_from_real (m1, ra1, GMP_RNDN);
12071 mpfr_clear_flags ();
12072 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12073 /* Remquo is independent of the rounding mode, so pass
12074 inexact=0 to do_mpfr_ckconv(). */
12075 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12076 mpfr_clears (m0, m1, NULL);
12077 if (result_rem)
12079 /* MPFR calculates quo in the host's long so it may
12080 return more bits in quo than the target int can hold
12081 if sizeof(host long) > sizeof(target int). This can
12082 happen even for native compilers in LP64 mode. In
12083 these cases, reduce the quo value modulo the largest
12084 number that the target int can hold while leaving one
12085 bit for the sign. */
12086 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12087 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12089 /* Dereference the quo pointer argument. */
12090 arg_quo = build_fold_indirect_ref (arg_quo);
12091 /* Proceed iff a valid pointer type was passed in. */
12092 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12094 /* Set the value. */
12095 tree result_quo
12096 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12097 build_int_cst (TREE_TYPE (arg_quo),
12098 integer_quo));
12099 TREE_SIDE_EFFECTS (result_quo) = 1;
12100 /* Combine the quo assignment with the rem. */
12101 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12102 result_quo, result_rem));
12107 return result;
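/* [Editorial sketch -- illustrative, not part of builtins.c.]  The
   library semantics being folded above, via the host's remquo: 7/2
   rounds to the even quotient 4, so the remainder is -1 and quo
   carries (at least) the low bits of 4.  */
#include <math.h>
#include <stdio.h>

int
main (void)
{
  int quo;
  double rem = remquo (7.0, 2.0, &quo);
  printf ("rem = %g, quo = %d\n", rem, quo); /* rem = -1, quo = 4  */
  return 0;
}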
12110 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12111 resulting value as a tree with type TYPE. The mpfr precision is
12112 set to the precision of TYPE. We assume that this mpfr function
12113 returns zero if the result could be calculated exactly within the
12114 requested precision. In addition, the integer pointer represented
12115 by ARG_SG will be dereferenced and set to the appropriate signgam
12116 (-1,1) value. */
12118 static tree
12119 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12121 tree result = NULL_TREE;
12123 STRIP_NOPS (arg);
12125 /* To proceed, MPFR must exactly represent the target floating point
12126 format, which only happens when the target base equals two. Also
12127 verify ARG is a constant and that ARG_SG is an int pointer. */
12128 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12129 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12130 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12131 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12133 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12135 /* In addition to NaN and Inf, the argument cannot be zero or a
12136 negative integer. */
12137 if (real_isfinite (ra)
12138 && ra->cl != rvc_zero
12139 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12141 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12142 const int prec = fmt->p;
12143 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12144 int inexact, sg;
12145 mpfr_t m;
12146 tree result_lg;
12148 mpfr_init2 (m, prec);
12149 mpfr_from_real (m, ra, GMP_RNDN);
12150 mpfr_clear_flags ();
12151 inexact = mpfr_lgamma (m, &sg, m, rnd);
12152 result_lg = do_mpfr_ckconv (m, type, inexact);
12153 mpfr_clear (m);
12154 if (result_lg)
12156 tree result_sg;
12158 /* Dereference the arg_sg pointer argument. */
12159 arg_sg = build_fold_indirect_ref (arg_sg);
12160 /* Assign the signgam value into *arg_sg. */
12161 result_sg = fold_build2 (MODIFY_EXPR,
12162 TREE_TYPE (arg_sg), arg_sg,
12163 build_int_cst (TREE_TYPE (arg_sg), sg));
12164 TREE_SIDE_EFFECTS (result_sg) = 1;
12165 /* Combine the signgam assignment with the lgamma result. */
12166 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12167 result_sg, result_lg));
12172 return result;
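/* [Editorial sketch -- illustrative, not part of builtins.c.]  The
   contract mirrored above, using the BSD/GNU lgamma_r extension: the
   sign of gamma comes back through the int pointer.  */
#define _DEFAULT_SOURCE /* for lgamma_r on glibc  */
#include <math.h>

double
demo_lgamma_r (void)
{
  int sg;
  double lg = lgamma_r (-0.5, &sg); /* gamma(-0.5) < 0, so sg == -1  */
  return sg * exp (lg);             /* reconstructs gamma(-0.5)  */
}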
12175 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12176 function FUNC on it and return the resulting value as a tree with
12177 type TYPE. The mpfr precision is set to the precision of TYPE. We
12178 assume that function FUNC returns zero if the result could be
12179 calculated exactly within the requested precision. */
12181 static tree
12182 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12184 tree result = NULL_TREE;
12186 STRIP_NOPS (arg);
12188 /* To proceed, MPFR must exactly represent the target floating point
12189 format, which only happens when the target base equals two. */
12190 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12191 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12192 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12194 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12195 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12197 if (real_isfinite (re) && real_isfinite (im))
12199 const struct real_format *const fmt =
12200 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12201 const int prec = fmt->p;
12202 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12203 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12204 int inexact;
12205 mpc_t m;
12207 mpc_init2 (m, prec);
12208 mpfr_from_real (mpc_realref (m), re, rnd);
12209 mpfr_from_real (mpc_imagref (m), im, rnd);
12210 mpfr_clear_flags ();
12211 inexact = func (m, m, crnd);
12212 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
12213 mpc_clear (m);
12217 return result;
12220 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
12221 mpc function FUNC on it and return the resulting value as a tree
12222 with type TYPE. The mpfr precision is set to the precision of
12223 TYPE. We assume that function FUNC returns zero if the result
12224 could be calculated exactly within the requested precision. If
12225 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12226 in the arguments and/or results. */
12228 tree
12229 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12230 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12232 tree result = NULL_TREE;
12234 STRIP_NOPS (arg0);
12235 STRIP_NOPS (arg1);
12237 /* To proceed, MPFR must exactly represent the target floating point
12238 format, which only happens when the target base equals two. */
12239 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12240 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12241 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12242 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12243 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12245 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12246 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12247 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12248 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12250 if (do_nonfinite
12251 || (real_isfinite (re0) && real_isfinite (im0)
12252 && real_isfinite (re1) && real_isfinite (im1)))
12254 const struct real_format *const fmt =
12255 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12256 const int prec = fmt->p;
12257 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12258 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12259 int inexact;
12260 mpc_t m0, m1;
12262 mpc_init2 (m0, prec);
12263 mpc_init2 (m1, prec);
12264 mpfr_from_real (mpc_realref (m0), re0, rnd);
12265 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12266 mpfr_from_real (mpc_realref (m1), re1, rnd);
12267 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12268 mpfr_clear_flags ();
12269 inexact = func (m0, m0, m1, crnd);
12270 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12271 mpc_clear (m0);
12272 mpc_clear (m1);
12276 return result;
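/* [Editorial sketch -- illustrative, not part of builtins.c.]  The MPC
   calling pattern used above, on the host: both operands are set up at
   the target precision and the operation reports exactness, e.g.
   (1+2i)*(3+4i) = -5+10i.  */
#include <mpc.h>
#include <stdio.h>

int
main (void)
{
  mpc_t a, b;

  mpc_init2 (a, 53);
  mpc_init2 (b, 53);
  mpc_set_d_d (a, 1.0, 2.0, MPC_RNDNN);
  mpc_set_d_d (b, 3.0, 4.0, MPC_RNDNN);
  mpc_mul (a, a, b, MPC_RNDNN);
  mpc_out_str (stdout, 10, 0, a, MPC_RNDNN);
  putchar ('\n');
  mpc_clear (a);
  mpc_clear (b);
  return 0;
}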
12279 /* A wrapper function for builtin folding that prevents warnings for
12280 "statement without effect" and the like, caused by removing the
12281 call node earlier than the warning is generated. */
12283 tree
12284 fold_call_stmt (gcall *stmt, bool ignore)
12286 tree ret = NULL_TREE;
12287 tree fndecl = gimple_call_fndecl (stmt);
12288 location_t loc = gimple_location (stmt);
12289 if (fndecl
12290 && TREE_CODE (fndecl) == FUNCTION_DECL
12291 && DECL_BUILT_IN (fndecl)
12292 && !gimple_call_va_arg_pack_p (stmt))
12294 int nargs = gimple_call_num_args (stmt);
12295 tree *args = (nargs > 0
12296 ? gimple_call_arg_ptr (stmt, 0)
12297 : &error_mark_node);
12299 if (avoid_folding_inline_builtin (fndecl))
12300 return NULL_TREE;
12301 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12303 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12305 else
12307 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
12308 if (ret)
12310 /* Propagate location information from original call to
12311 expansion of builtin. Otherwise things like
12312 maybe_emit_chk_warning, that operate on the expansion
12313 of a builtin, will use the wrong location information. */
12314 if (gimple_has_location (stmt))
12316 tree realret = ret;
12317 if (TREE_CODE (ret) == NOP_EXPR)
12318 realret = TREE_OPERAND (ret, 0);
12319 if (CAN_HAVE_LOCATION_P (realret)
12320 && !EXPR_HAS_LOCATION (realret))
12321 SET_EXPR_LOCATION (realret, loc);
12322 return realret;
12324 return ret;
12328 return NULL_TREE;
12331 /* Look up the function in builtin_decl that corresponds to DECL
12332 and set ASMSPEC as its user assembler name. DECL must be a
12333 function decl that declares a builtin. */
12335 void
12336 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12338 tree builtin;
12339 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12340 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12341 && asmspec != 0);
12343 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12344 set_user_assembler_name (builtin, asmspec);
12345 switch (DECL_FUNCTION_CODE (decl))
12347 case BUILT_IN_MEMCPY:
12348 init_block_move_fn (asmspec);
12349 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12350 break;
12351 case BUILT_IN_MEMSET:
12352 init_block_clear_fn (asmspec);
12353 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12354 break;
12355 case BUILT_IN_MEMMOVE:
12356 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12357 break;
12358 case BUILT_IN_MEMCMP:
12359 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12360 break;
12361 case BUILT_IN_ABORT:
12362 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12363 break;
12364 case BUILT_IN_FFS:
12365 if (INT_TYPE_SIZE < BITS_PER_WORD)
12367 set_user_assembler_libfunc ("ffs", asmspec);
12368 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12369 MODE_INT, 0), "ffs");
12371 break;
12372 default:
12373 break;
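/* [Editorial sketch -- illustrative, not part of builtins.c.]  The
   user-level construct that reaches this function: an asm label on a
   builtin's declaration; "my_memcpy" is a hypothetical name.  */
#include <stddef.h>

void *memcpy (void *, const void *, size_t) __asm__ ("my_memcpy");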
12377 /* Return true if DECL is a builtin that expands to a constant or similarly
12378 simple code. */
12379 bool
12380 is_simple_builtin (tree decl)
12382 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12383 switch (DECL_FUNCTION_CODE (decl))
12385 /* Builtins that expand to constants. */
12386 case BUILT_IN_CONSTANT_P:
12387 case BUILT_IN_EXPECT:
12388 case BUILT_IN_OBJECT_SIZE:
12389 case BUILT_IN_UNREACHABLE:
12390 /* Simple register moves or loads from stack. */
12391 case BUILT_IN_ASSUME_ALIGNED:
12392 case BUILT_IN_RETURN_ADDRESS:
12393 case BUILT_IN_EXTRACT_RETURN_ADDR:
12394 case BUILT_IN_FROB_RETURN_ADDR:
12395 case BUILT_IN_RETURN:
12396 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12397 case BUILT_IN_FRAME_ADDRESS:
12398 case BUILT_IN_VA_END:
12399 case BUILT_IN_STACK_SAVE:
12400 case BUILT_IN_STACK_RESTORE:
12401 /* Exception state returns or moves registers around. */
12402 case BUILT_IN_EH_FILTER:
12403 case BUILT_IN_EH_POINTER:
12404 case BUILT_IN_EH_COPY_VALUES:
12405 return true;
12407 default:
12408 return false;
12411 return false;
12414 /* Return true if DECL is a builtin that is not expensive, i.e., one that
12415 is most probably expanded inline into reasonably simple code. This is a
12416 superset of is_simple_builtin. */
12417 bool
12418 is_inexpensive_builtin (tree decl)
12420 if (!decl)
12421 return false;
12422 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12423 return true;
12424 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12425 switch (DECL_FUNCTION_CODE (decl))
12427 case BUILT_IN_ABS:
12428 case BUILT_IN_ALLOCA:
12429 case BUILT_IN_ALLOCA_WITH_ALIGN:
12430 case BUILT_IN_BSWAP16:
12431 case BUILT_IN_BSWAP32:
12432 case BUILT_IN_BSWAP64:
12433 case BUILT_IN_CLZ:
12434 case BUILT_IN_CLZIMAX:
12435 case BUILT_IN_CLZL:
12436 case BUILT_IN_CLZLL:
12437 case BUILT_IN_CTZ:
12438 case BUILT_IN_CTZIMAX:
12439 case BUILT_IN_CTZL:
12440 case BUILT_IN_CTZLL:
12441 case BUILT_IN_FFS:
12442 case BUILT_IN_FFSIMAX:
12443 case BUILT_IN_FFSL:
12444 case BUILT_IN_FFSLL:
12445 case BUILT_IN_IMAXABS:
12446 case BUILT_IN_FINITE:
12447 case BUILT_IN_FINITEF:
12448 case BUILT_IN_FINITEL:
12449 case BUILT_IN_FINITED32:
12450 case BUILT_IN_FINITED64:
12451 case BUILT_IN_FINITED128:
12452 case BUILT_IN_FPCLASSIFY:
12453 case BUILT_IN_ISFINITE:
12454 case BUILT_IN_ISINF_SIGN:
12455 case BUILT_IN_ISINF:
12456 case BUILT_IN_ISINFF:
12457 case BUILT_IN_ISINFL:
12458 case BUILT_IN_ISINFD32:
12459 case BUILT_IN_ISINFD64:
12460 case BUILT_IN_ISINFD128:
12461 case BUILT_IN_ISNAN:
12462 case BUILT_IN_ISNANF:
12463 case BUILT_IN_ISNANL:
12464 case BUILT_IN_ISNAND32:
12465 case BUILT_IN_ISNAND64:
12466 case BUILT_IN_ISNAND128:
12467 case BUILT_IN_ISNORMAL:
12468 case BUILT_IN_ISGREATER:
12469 case BUILT_IN_ISGREATEREQUAL:
12470 case BUILT_IN_ISLESS:
12471 case BUILT_IN_ISLESSEQUAL:
12472 case BUILT_IN_ISLESSGREATER:
12473 case BUILT_IN_ISUNORDERED:
12474 case BUILT_IN_VA_ARG_PACK:
12475 case BUILT_IN_VA_ARG_PACK_LEN:
12476 case BUILT_IN_VA_COPY:
12477 case BUILT_IN_TRAP:
12478 case BUILT_IN_SAVEREGS:
12479 case BUILT_IN_POPCOUNTL:
12480 case BUILT_IN_POPCOUNTLL:
12481 case BUILT_IN_POPCOUNTIMAX:
12482 case BUILT_IN_POPCOUNT:
12483 case BUILT_IN_PARITYL:
12484 case BUILT_IN_PARITYLL:
12485 case BUILT_IN_PARITYIMAX:
12486 case BUILT_IN_PARITY:
12487 case BUILT_IN_LABS:
12488 case BUILT_IN_LLABS:
12489 case BUILT_IN_PREFETCH:
12490 case BUILT_IN_ACC_ON_DEVICE:
12491 return true;
12493 default:
12494 return is_simple_builtin (decl);
12497 return false;