/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "params.h"
35 #include "tm_p.h"
36 #include "stringpool.h"
37 #include "tree-vrp.h"
38 #include "tree-ssanames.h"
39 #include "expmed.h"
40 #include "optabs.h"
41 #include "emit-rtl.h"
42 #include "recog.h"
43 #include "diagnostic-core.h"
44 #include "alias.h"
45 #include "fold-const.h"
46 #include "fold-const-call.h"
47 #include "gimple-ssa-warn-restrict.h"
48 #include "stor-layout.h"
49 #include "calls.h"
50 #include "varasm.h"
51 #include "tree-object-size.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type, making sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Nonzero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
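
/* For illustration: the prefix checks above accept "__builtin_memcpy",
   "__sync_fetch_and_add" and "__atomic_load_n", but reject "memcpy" as
   well as names that merely contain one of the prefixes somewhere in the
   middle, since strncmp only matches at the start of NAME.  */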

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
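
/* For example, if get_object_alignment_1 determines that the address of
   EXP is 12 bytes past a 32-byte boundary (ALIGN = 256 bits, BITPOS = 96
   bits), the guaranteed alignment is least_bit_hwi (96) = 32 bits: four
   bytes is the largest power of two known to divide the address.  */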

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
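
/* For example, string_length ("ab\0cd", 1, 5) returns 2: the scan stops at
   the embedded NUL byte.  For ELTSIZE 2 or 4 an element counts as zero
   only when all of its bytes are zero, which is what the memcmp against
   "\0\0\0\0" above tests.  */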

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree src, int only_value, unsigned eltsize)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  src = string_constant (src, &byteoff, &memsize, NULL);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize - 1;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* For empty strings the result should be zero.  */
      if (maxelts == 0)
	return ssize_int (0);

      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.  */
      if (len < strelts || len > maxelts)
	return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff) : byteoff;
      offsave = fold_convert (ssizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      build_int_cst (ssizetype, len));
      tree lenexp = size_diffop_loc (loc, ssize_int (strelts), offsave);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.  */
  if (len > maxelts - eltoff)
    return NULL_TREE;

  return ssize_int (len);
}
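
/* For example, applied to &"foobar"[2] this function sees the STRING_CST
   "foobar" with BYTEOFF 2, so ELTOFF is 2 and the result folds to
   ssize_int (4).  For a constant offset past the end of the string it
   warns (-Warray-bounds) and returns NULL_TREE, leaving the length to be
   computed by a runtime strlen call.  */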

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
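
/* For example, reading "abcd" in a 4-byte SImode yields the constant
   0x64636261 on a little-endian target ('a' in the low byte) and
   0x61626364 on a big-endian one.  Because CH sticks at zero once a NUL
   is seen, bytes past the end of a short string are read as zeros.  */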

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
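
/* For example, with 8-bit chars on both host and target, the constant
   0x141 masks down to 0x41 on both sides, so 'A' is stored in *P and 0 is
   returned.  If the target had 16-bit chars, 0x141 would survive the
   target mask but not the host mask, and the function would fail with 1.  */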

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
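
/* The resulting layout of the setjmp buffer, in Pmode-sized words, is:

     word 0    frame pointer (targetm.builtin_setjmp_frame_value)
     word 1    address of RECEIVER_LABEL
     word 2+   stack save area in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below read
   and update the buffer using these same offsets.  */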

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

/* Return true if the iterator ITER has more arguments to visit.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
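
/* For example, expand_builtin_nonlocal_goto below uses

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   to require exactly two pointer arguments, while expand_builtin_prefetch
   passes (POINTER_TYPE, 0) to require one pointer followed by any number
   of additional arguments.  */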

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
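
/* For example, the source-level call

     __builtin_prefetch (&a[i], 1, 0);

   requests a prefetch of &a[i] for writing (second argument 1) with no
   expected temporal locality (third argument 0), while a bare
   __builtin_prefetch (&a[i]) gets the defaults applied above: a read
   prefetch with maximal locality.  */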

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
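
/* The block returned by __builtin_apply_args is thus laid out as:

     offset 0   the incoming arg pointer (one Pmode word)
     next       the structure value address, if it is passed as a hidden
		first argument
     then       one slot per argument register, each aligned to its mode,
		with the mode recorded in apply_args_mode

   apply_result_size below computes the analogous layout for the
   function-value registers saved by __builtin_apply.  */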

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed it to us, not
     as we might have pretended it was passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1602 /* Perform an untyped call and save the state required to perform an
1603 untyped return of whatever value was returned by the given function. */
1605 static rtx
1606 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1608 int size, align, regno;
1609 fixed_size_mode mode;
1610 rtx incoming_args, result, reg, dest, src;
1611 rtx_call_insn *call_insn;
1612 rtx old_stack_level = 0;
1613 rtx call_fusage = 0;
1614 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1616 arguments = convert_memory_address (Pmode, arguments);
1618 /* Create a block where the return registers can be saved. */
1619 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1621 /* Fetch the arg pointer from the ARGUMENTS block. */
1622 incoming_args = gen_reg_rtx (Pmode);
1623 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1624 if (!STACK_GROWS_DOWNWARD)
1625 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1626 incoming_args, 0, OPTAB_LIB_WIDEN);
1628 /* Push a new argument block and copy the arguments. Do not allow
1629 the (potential) memcpy call below to interfere with our stack
1630 manipulations. */
1631 do_pending_stack_adjust ();
1632 NO_DEFER_POP;
1634 /* Save the stack with nonlocal if available. */
1635 if (targetm.have_save_stack_nonlocal ())
1636 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1637 else
1638 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1640 /* Allocate a block of memory onto the stack and copy the memory
1641 arguments to the outgoing arguments address. We can pass TRUE
1642 as the 4th argument because we just saved the stack pointer
1643 and will restore it right after the call. */
1644 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1646 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1647 may have already set current_function_calls_alloca to true.
1648 current_function_calls_alloca won't be set if argsize is zero,
1649 so we have to guarantee need_drap is true here. */
1650 if (SUPPORTS_STACK_ALIGNMENT)
1651 crtl->need_drap = true;
1653 dest = virtual_outgoing_args_rtx;
1654 if (!STACK_GROWS_DOWNWARD)
1656 if (CONST_INT_P (argsize))
1657 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1658 else
1659 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1661 dest = gen_rtx_MEM (BLKmode, dest);
1662 set_mem_align (dest, PARM_BOUNDARY);
1663 src = gen_rtx_MEM (BLKmode, incoming_args);
1664 set_mem_align (src, PARM_BOUNDARY);
1665 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1667 /* Refer to the argument block. */
1668 apply_args_size ();
1669 arguments = gen_rtx_MEM (BLKmode, arguments);
1670 set_mem_align (arguments, PARM_BOUNDARY);
1672 /* Walk past the arg-pointer and structure value address. */
1673 size = GET_MODE_SIZE (Pmode);
1674 if (struct_value)
1675 size += GET_MODE_SIZE (Pmode);
1677 /* Restore each of the registers previously saved. Make USE insns
1678 for each of these registers for use in making the call. */
1679 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1680 if ((mode = apply_args_mode[regno]) != VOIDmode)
1682 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1683 if (size % align != 0)
1684 size = CEIL (size, align) * align;
1685 reg = gen_rtx_REG (mode, regno);
1686 emit_move_insn (reg, adjust_address (arguments, mode, size));
1687 use_reg (&call_fusage, reg);
1688 size += GET_MODE_SIZE (mode);
1691 /* Restore the structure value address unless this is passed as an
1692 "invisible" first argument. */
1693 size = GET_MODE_SIZE (Pmode);
1694 if (struct_value)
1696 rtx value = gen_reg_rtx (Pmode);
1697 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1698 emit_move_insn (struct_value, value);
1699 if (REG_P (struct_value))
1700 use_reg (&call_fusage, struct_value);
1701 size += GET_MODE_SIZE (Pmode);
1704 /* All arguments and registers used for the call are set up by now! */
1705 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1707 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1708 and we don't want to load it into a register as an optimization,
1709 because prepare_call_address already did it if it should be done. */
1710 if (GET_CODE (function) != SYMBOL_REF)
1711 function = memory_address (FUNCTION_MODE, function);
1713 /* Generate the actual call instruction and save the return value. */
1714 if (targetm.have_untyped_call ())
1716 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1717 emit_call_insn (targetm.gen_untyped_call (mem, result,
1718 result_vector (1, result)));
1720 else if (targetm.have_call_value ())
1722 rtx valreg = 0;
1724 /* Locate the unique return register. It is not possible to
1725 express a call that sets more than one return register using
1726 call_value; use untyped_call for that. In fact, untyped_call
1727 only needs to save the return registers in the given block. */
1728 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1729 if ((mode = apply_result_mode[regno]) != VOIDmode)
1731 gcc_assert (!valreg); /* have_untyped_call required. */
1733 valreg = gen_rtx_REG (mode, regno);
1736 emit_insn (targetm.gen_call_value (valreg,
1737 gen_rtx_MEM (FUNCTION_MODE, function),
1738 const0_rtx, NULL_RTX, const0_rtx));
1740 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1742 else
1743 gcc_unreachable ();
1745 /* Find the CALL insn we just emitted, and attach the register usage
1746 information. */
1747 call_insn = last_call_insn ();
1748 add_function_usage_to (call_insn, call_fusage);
1750 /* Restore the stack. */
1751 if (targetm.have_save_stack_nonlocal ())
1752 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1753 else
1754 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1755 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1757 OK_DEFER_POP;
1759 /* Return the address of the result block. */
1760 result = copy_addr_to_reg (XEXP (result, 0));
1761 return convert_memory_address (ptr_mode, result);
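/* For illustration only (a sketch; FORWARDER and TARGET_FN are
   hypothetical names, and 64 is a caller-chosen upper bound on the
   size of the argument block): the kind of user code that the
   untyped-call expanders above and the untyped return below handle.

     double target_fn (int, double);

     double forwarder (int a, double b)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }
*/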
1764 /* Perform an untyped return. */
1766 static void
1767 expand_builtin_return (rtx result)
1769 int size, align, regno;
1770 fixed_size_mode mode;
1771 rtx reg;
1772 rtx_insn *call_fusage = 0;
1774 result = convert_memory_address (Pmode, result);
1776 apply_result_size ();
1777 result = gen_rtx_MEM (BLKmode, result);
1779 if (targetm.have_untyped_return ())
1781 rtx vector = result_vector (0, result);
1782 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1783 emit_barrier ();
1784 return;
1787 /* Restore the return value and note that each value is used. */
1788 size = 0;
1789 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1790 if ((mode = apply_result_mode[regno]) != VOIDmode)
1792 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1793 if (size % align != 0)
1794 size = CEIL (size, align) * align;
1795 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1796 emit_move_insn (reg, adjust_address (result, mode, size));
1798 push_to_sequence (call_fusage);
1799 emit_use (reg);
1800 call_fusage = get_insns ();
1801 end_sequence ();
1802 size += GET_MODE_SIZE (mode);
1805 /* Put the USE insns before the return. */
1806 emit_insn (call_fusage);
1808 Return whatever value was restored by jumping directly to the end
1809 of the function. */
1810 expand_naked_return ();
1813 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1815 static enum type_class
1816 type_to_class (tree type)
1818 switch (TREE_CODE (type))
1820 case VOID_TYPE: return void_type_class;
1821 case INTEGER_TYPE: return integer_type_class;
1822 case ENUMERAL_TYPE: return enumeral_type_class;
1823 case BOOLEAN_TYPE: return boolean_type_class;
1824 case POINTER_TYPE: return pointer_type_class;
1825 case REFERENCE_TYPE: return reference_type_class;
1826 case OFFSET_TYPE: return offset_type_class;
1827 case REAL_TYPE: return real_type_class;
1828 case COMPLEX_TYPE: return complex_type_class;
1829 case FUNCTION_TYPE: return function_type_class;
1830 case METHOD_TYPE: return method_type_class;
1831 case RECORD_TYPE: return record_type_class;
1832 case UNION_TYPE:
1833 case QUAL_UNION_TYPE: return union_type_class;
1834 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1835 ? string_type_class : array_type_class);
1836 case LANG_TYPE: return lang_type_class;
1837 default: return no_type_class;
1841 /* Expand a call EXP to __builtin_classify_type. */
1843 static rtx
1844 expand_builtin_classify_type (tree exp)
1846 if (call_expr_nargs (exp))
1847 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1848 return GEN_INT (no_type_class);
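/* For illustration (a sketch; the numeric values depend on the
   type_class enumeration in typeclass.h):

     __builtin_classify_type (0)    evaluates to integer_type_class
     __builtin_classify_type (0.0)  evaluates to real_type_class
     __builtin_classify_type ("")   evaluates to pointer_type_class

   the last because the argument undergoes the default promotions and
   decays to a pointer before the builtin sees it.  */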
1851 /* This helper macro, meant to be used in mathfn_built_in_2 below, determines
1852 which among a set of builtin math functions is appropriate for a given type
1853 mode. The `F' (float) and `L' (long double) are automatically generated
1854 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1855 types, there are additional types that are considered with 'F32', 'F64',
1856 'F128', etc. suffixes. */
1857 #define CASE_MATHFN(MATHFN) \
1858 CASE_CFN_##MATHFN: \
1859 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1860 fcodel = BUILT_IN_##MATHFN##L ; break;
1861 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1862 types. */
1863 #define CASE_MATHFN_FLOATN(MATHFN) \
1864 CASE_CFN_##MATHFN: \
1865 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1866 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1867 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1868 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1869 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1870 break;
1871 /* Similar to above, but appends _R after any F/L suffix. */
1872 #define CASE_MATHFN_REENT(MATHFN) \
1873 case CFN_BUILT_IN_##MATHFN##_R: \
1874 case CFN_BUILT_IN_##MATHFN##F_R: \
1875 case CFN_BUILT_IN_##MATHFN##L_R: \
1876 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1877 fcodel = BUILT_IN_##MATHFN##L_R ; break;
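/* For illustration, CASE_MATHFN (SQRT) expands to roughly:

     case CFN_SQRT:
     case CFN_BUILT_IN_SQRT:
     case CFN_BUILT_IN_SQRTF:
     case CFN_BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   where the case labels come from CASE_CFN_SQRT in case-cfn-macros.h;
   that header is generated, so treat the exact label set as a sketch.  */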
1879 /* Return a function equivalent to FN but operating on floating-point
1880 values of type TYPE, or END_BUILTINS if no such function exists.
1881 This is purely an operation on function codes; it does not guarantee
1882 that the target actually has an implementation of the function. */
1884 static built_in_function
1885 mathfn_built_in_2 (tree type, combined_fn fn)
1887 tree mtype;
1888 built_in_function fcode, fcodef, fcodel;
1889 built_in_function fcodef16 = END_BUILTINS;
1890 built_in_function fcodef32 = END_BUILTINS;
1891 built_in_function fcodef64 = END_BUILTINS;
1892 built_in_function fcodef128 = END_BUILTINS;
1893 built_in_function fcodef32x = END_BUILTINS;
1894 built_in_function fcodef64x = END_BUILTINS;
1895 built_in_function fcodef128x = END_BUILTINS;
1897 switch (fn)
1899 CASE_MATHFN (ACOS)
1900 CASE_MATHFN (ACOSH)
1901 CASE_MATHFN (ASIN)
1902 CASE_MATHFN (ASINH)
1903 CASE_MATHFN (ATAN)
1904 CASE_MATHFN (ATAN2)
1905 CASE_MATHFN (ATANH)
1906 CASE_MATHFN (CBRT)
1907 CASE_MATHFN_FLOATN (CEIL)
1908 CASE_MATHFN (CEXPI)
1909 CASE_MATHFN_FLOATN (COPYSIGN)
1910 CASE_MATHFN (COS)
1911 CASE_MATHFN (COSH)
1912 CASE_MATHFN (DREM)
1913 CASE_MATHFN (ERF)
1914 CASE_MATHFN (ERFC)
1915 CASE_MATHFN (EXP)
1916 CASE_MATHFN (EXP10)
1917 CASE_MATHFN (EXP2)
1918 CASE_MATHFN (EXPM1)
1919 CASE_MATHFN (FABS)
1920 CASE_MATHFN (FDIM)
1921 CASE_MATHFN_FLOATN (FLOOR)
1922 CASE_MATHFN_FLOATN (FMA)
1923 CASE_MATHFN_FLOATN (FMAX)
1924 CASE_MATHFN_FLOATN (FMIN)
1925 CASE_MATHFN (FMOD)
1926 CASE_MATHFN (FREXP)
1927 CASE_MATHFN (GAMMA)
1928 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1929 CASE_MATHFN (HUGE_VAL)
1930 CASE_MATHFN (HYPOT)
1931 CASE_MATHFN (ILOGB)
1932 CASE_MATHFN (ICEIL)
1933 CASE_MATHFN (IFLOOR)
1934 CASE_MATHFN (INF)
1935 CASE_MATHFN (IRINT)
1936 CASE_MATHFN (IROUND)
1937 CASE_MATHFN (ISINF)
1938 CASE_MATHFN (J0)
1939 CASE_MATHFN (J1)
1940 CASE_MATHFN (JN)
1941 CASE_MATHFN (LCEIL)
1942 CASE_MATHFN (LDEXP)
1943 CASE_MATHFN (LFLOOR)
1944 CASE_MATHFN (LGAMMA)
1945 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1946 CASE_MATHFN (LLCEIL)
1947 CASE_MATHFN (LLFLOOR)
1948 CASE_MATHFN (LLRINT)
1949 CASE_MATHFN (LLROUND)
1950 CASE_MATHFN (LOG)
1951 CASE_MATHFN (LOG10)
1952 CASE_MATHFN (LOG1P)
1953 CASE_MATHFN (LOG2)
1954 CASE_MATHFN (LOGB)
1955 CASE_MATHFN (LRINT)
1956 CASE_MATHFN (LROUND)
1957 CASE_MATHFN (MODF)
1958 CASE_MATHFN (NAN)
1959 CASE_MATHFN (NANS)
1960 CASE_MATHFN_FLOATN (NEARBYINT)
1961 CASE_MATHFN (NEXTAFTER)
1962 CASE_MATHFN (NEXTTOWARD)
1963 CASE_MATHFN (POW)
1964 CASE_MATHFN (POWI)
1965 CASE_MATHFN (POW10)
1966 CASE_MATHFN (REMAINDER)
1967 CASE_MATHFN (REMQUO)
1968 CASE_MATHFN_FLOATN (RINT)
1969 CASE_MATHFN_FLOATN (ROUND)
1970 CASE_MATHFN (SCALB)
1971 CASE_MATHFN (SCALBLN)
1972 CASE_MATHFN (SCALBN)
1973 CASE_MATHFN (SIGNBIT)
1974 CASE_MATHFN (SIGNIFICAND)
1975 CASE_MATHFN (SIN)
1976 CASE_MATHFN (SINCOS)
1977 CASE_MATHFN (SINH)
1978 CASE_MATHFN_FLOATN (SQRT)
1979 CASE_MATHFN (TAN)
1980 CASE_MATHFN (TANH)
1981 CASE_MATHFN (TGAMMA)
1982 CASE_MATHFN_FLOATN (TRUNC)
1983 CASE_MATHFN (Y0)
1984 CASE_MATHFN (Y1)
1985 CASE_MATHFN (YN)
1987 default:
1988 return END_BUILTINS;
1991 mtype = TYPE_MAIN_VARIANT (type);
1992 if (mtype == double_type_node)
1993 return fcode;
1994 else if (mtype == float_type_node)
1995 return fcodef;
1996 else if (mtype == long_double_type_node)
1997 return fcodel;
1998 else if (mtype == float16_type_node)
1999 return fcodef16;
2000 else if (mtype == float32_type_node)
2001 return fcodef32;
2002 else if (mtype == float64_type_node)
2003 return fcodef64;
2004 else if (mtype == float128_type_node)
2005 return fcodef128;
2006 else if (mtype == float32x_type_node)
2007 return fcodef32x;
2008 else if (mtype == float64x_type_node)
2009 return fcodef64x;
2010 else if (mtype == float128x_type_node)
2011 return fcodef128x;
2012 else
2013 return END_BUILTINS;
2016 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2017 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2018 otherwise use the explicit declaration. If we can't do the conversion,
2019 return null. */
2021 static tree
2022 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2024 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2025 if (fcode2 == END_BUILTINS)
2026 return NULL_TREE;
2028 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2029 return NULL_TREE;
2031 return builtin_decl_explicit (fcode2);
2034 /* Like mathfn_built_in_1, but always use the implicit array. */
2036 tree
2037 mathfn_built_in (tree type, combined_fn fn)
2039 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2042 /* Like mathfn_built_in_1, but take a built_in_function and
2043 always use the implicit array. */
2045 tree
2046 mathfn_built_in (tree type, enum built_in_function fn)
2048 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
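/* For example (a sketch):

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   yields the implicit declaration of sqrtf, or NULL_TREE when the
   front end has not provided an implicit declaration for it.  */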
2051 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2052 return its code, otherwise return IFN_LAST. Note that this function
2053 only tests whether the function is defined in internal-fn.def, not whether
2054 it is actually available on the target. */
2056 internal_fn
2057 associated_internal_fn (tree fndecl)
2059 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2060 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2061 switch (DECL_FUNCTION_CODE (fndecl))
2063 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2064 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2065 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2066 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2067 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2068 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2069 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2070 #include "internal-fn.def"
2072 CASE_FLT_FN (BUILT_IN_POW10):
2073 return IFN_EXP10;
2075 CASE_FLT_FN (BUILT_IN_DREM):
2076 return IFN_REMAINDER;
2078 CASE_FLT_FN (BUILT_IN_SCALBN):
2079 CASE_FLT_FN (BUILT_IN_SCALBLN):
2080 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2081 return IFN_LDEXP;
2082 return IFN_LAST;
2084 default:
2085 return IFN_LAST;
2089 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2090 on the current target by a call to an internal function, return the
2091 code of that internal function, otherwise return IFN_LAST. The caller
2092 is responsible for ensuring that any side-effects of the built-in
2093 call are dealt with correctly. E.g. if CALL sets errno, the caller
2094 must decide that the errno result isn't needed or make it available
2095 in some other way. */
2097 internal_fn
2098 replacement_internal_fn (gcall *call)
2100 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2102 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2103 if (ifn != IFN_LAST)
2105 tree_pair types = direct_internal_fn_types (ifn, call);
2106 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2107 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2108 return ifn;
2111 return IFN_LAST;
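/* Sketch of intended use by a pass, assuming it has already proved
   that the errno result of the gcall CALL is unused:

     internal_fn ifn = replacement_internal_fn (call);
     if (ifn != IFN_LAST)
       {
         gcall *new_call
           = gimple_build_call_internal (ifn, 1,
                                         gimple_call_arg (call, 0));
         gimple_call_set_lhs (new_call, gimple_call_lhs (call));
         ... replace CALL with NEW_CALL in the statement stream ...
       }
*/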
2114 /* Expand a call to the builtin ternary math functions (fma).
2115 Return NULL_RTX if a normal call should be emitted rather than expanding the
2116 function in-line. EXP is the expression that is a call to the builtin
2117 function; if convenient, the result should be placed in TARGET.
2118 SUBTARGET may be used as the target for computing one of EXP's
2119 operands. */
2121 static rtx
2122 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2124 optab builtin_optab;
2125 rtx op0, op1, op2, result;
2126 rtx_insn *insns;
2127 tree fndecl = get_callee_fndecl (exp);
2128 tree arg0, arg1, arg2;
2129 machine_mode mode;
2131 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2132 return NULL_RTX;
2134 arg0 = CALL_EXPR_ARG (exp, 0);
2135 arg1 = CALL_EXPR_ARG (exp, 1);
2136 arg2 = CALL_EXPR_ARG (exp, 2);
2138 switch (DECL_FUNCTION_CODE (fndecl))
2140 CASE_FLT_FN (BUILT_IN_FMA):
2141 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2142 builtin_optab = fma_optab; break;
2143 default:
2144 gcc_unreachable ();
2147 /* Make a suitable register to place result in. */
2148 mode = TYPE_MODE (TREE_TYPE (exp));
2150 /* Before working hard, check whether the instruction is available. */
2151 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2152 return NULL_RTX;
2154 result = gen_reg_rtx (mode);
2156 /* Always stabilize the argument list. */
2157 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2158 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2159 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2161 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2162 op1 = expand_normal (arg1);
2163 op2 = expand_normal (arg2);
2165 start_sequence ();
2167 /* Compute into RESULT.
2168 Set RESULT to wherever the result comes back. */
2169 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2170 result, 0);
2172 /* If we were unable to expand via the builtin, stop the sequence
2173 (without outputting the insns) and call the library function
2174 with the stabilized argument list. */
2175 if (result == 0)
2177 end_sequence ();
2178 return expand_call (exp, target, target == const0_rtx);
2181 /* Output the entire sequence. */
2182 insns = get_insns ();
2183 end_sequence ();
2184 emit_insn (insns);
2186 return result;
2189 /* Expand a call to the builtin sin and cos math functions.
2190 Return NULL_RTX if a normal call should be emitted rather than expanding the
2191 function in-line. EXP is the expression that is a call to the builtin
2192 function; if convenient, the result should be placed in TARGET.
2193 SUBTARGET may be used as the target for computing one of EXP's
2194 operands. */
2196 static rtx
2197 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2199 optab builtin_optab;
2200 rtx op0;
2201 rtx_insn *insns;
2202 tree fndecl = get_callee_fndecl (exp);
2203 machine_mode mode;
2204 tree arg;
2206 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2207 return NULL_RTX;
2209 arg = CALL_EXPR_ARG (exp, 0);
2211 switch (DECL_FUNCTION_CODE (fndecl))
2213 CASE_FLT_FN (BUILT_IN_SIN):
2214 CASE_FLT_FN (BUILT_IN_COS):
2215 builtin_optab = sincos_optab; break;
2216 default:
2217 gcc_unreachable ();
2220 /* Make a suitable register to place result in. */
2221 mode = TYPE_MODE (TREE_TYPE (exp));
2223 /* Check if the sincos insn is available; otherwise fall back
2224 to the sin or cos insn. */
2225 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2226 switch (DECL_FUNCTION_CODE (fndecl))
2228 CASE_FLT_FN (BUILT_IN_SIN):
2229 builtin_optab = sin_optab; break;
2230 CASE_FLT_FN (BUILT_IN_COS):
2231 builtin_optab = cos_optab; break;
2232 default:
2233 gcc_unreachable ();
2236 /* Before working hard, check whether the instruction is available. */
2237 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2239 rtx result = gen_reg_rtx (mode);
2241 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2242 need to expand the argument again. This way, we will not perform
2243 side-effects more than once. */
2244 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2246 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2248 start_sequence ();
2250 /* Compute into RESULT.
2251 Set RESULT to wherever the result comes back. */
2252 if (builtin_optab == sincos_optab)
2254 int ok;
2256 switch (DECL_FUNCTION_CODE (fndecl))
2258 CASE_FLT_FN (BUILT_IN_SIN):
2259 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2260 break;
2261 CASE_FLT_FN (BUILT_IN_COS):
2262 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2263 break;
2264 default:
2265 gcc_unreachable ();
2267 gcc_assert (ok);
2269 else
2270 result = expand_unop (mode, builtin_optab, op0, result, 0);
2272 if (result != 0)
2274 /* Output the entire sequence. */
2275 insns = get_insns ();
2276 end_sequence ();
2277 emit_insn (insns);
2278 return result;
2281 /* If we were unable to expand via the builtin, stop the sequence
2282 (without outputting the insns) and call the library function
2283 with the stabilized argument list. */
2284 end_sequence ();
2287 return expand_call (exp, target, target == const0_rtx);
2290 /* Given an interclass math builtin decl FNDECL and its argument ARG
2291 return an RTL instruction code that implements the functionality.
2292 If that isn't possible or available return CODE_FOR_nothing. */
2294 static enum insn_code
2295 interclass_mathfn_icode (tree arg, tree fndecl)
2297 bool errno_set = false;
2298 optab builtin_optab = unknown_optab;
2299 machine_mode mode;
2301 switch (DECL_FUNCTION_CODE (fndecl))
2303 CASE_FLT_FN (BUILT_IN_ILOGB):
2304 errno_set = true; builtin_optab = ilogb_optab; break;
2305 CASE_FLT_FN (BUILT_IN_ISINF):
2306 builtin_optab = isinf_optab; break;
2307 case BUILT_IN_ISNORMAL:
2308 case BUILT_IN_ISFINITE:
2309 CASE_FLT_FN (BUILT_IN_FINITE):
2310 case BUILT_IN_FINITED32:
2311 case BUILT_IN_FINITED64:
2312 case BUILT_IN_FINITED128:
2313 case BUILT_IN_ISINFD32:
2314 case BUILT_IN_ISINFD64:
2315 case BUILT_IN_ISINFD128:
2316 /* These builtins have no optabs (yet). */
2317 break;
2318 default:
2319 gcc_unreachable ();
2322 /* There's no easy way to detect the case we need to set EDOM. */
2323 if (flag_errno_math && errno_set)
2324 return CODE_FOR_nothing;
2326 /* Optab mode depends on the mode of the input argument. */
2327 mode = TYPE_MODE (TREE_TYPE (arg));
2329 if (builtin_optab)
2330 return optab_handler (builtin_optab, mode);
2331 return CODE_FOR_nothing;
2334 /* Expand a call to one of the builtin math functions that operate on
2335 a floating-point argument and produce an integer result (ilogb, isinf,
2336 isnan, etc.).
2337 Return 0 if a normal call should be emitted rather than expanding the
2338 function in-line. EXP is the expression that is a call to the builtin
2339 function; if convenient, the result should be placed in TARGET. */
2341 static rtx
2342 expand_builtin_interclass_mathfn (tree exp, rtx target)
2344 enum insn_code icode = CODE_FOR_nothing;
2345 rtx op0;
2346 tree fndecl = get_callee_fndecl (exp);
2347 machine_mode mode;
2348 tree arg;
2350 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2351 return NULL_RTX;
2353 arg = CALL_EXPR_ARG (exp, 0);
2354 icode = interclass_mathfn_icode (arg, fndecl);
2355 mode = TYPE_MODE (TREE_TYPE (arg));
2357 if (icode != CODE_FOR_nothing)
2359 struct expand_operand ops[1];
2360 rtx_insn *last = get_last_insn ();
2361 tree orig_arg = arg;
2363 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2364 need to expand the argument again. This way, we will not perform
2365 side-effects more than once. */
2366 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2368 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2370 if (mode != GET_MODE (op0))
2371 op0 = convert_to_mode (mode, op0, 0);
2373 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2374 if (maybe_legitimize_operands (icode, 0, 1, ops)
2375 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2376 return ops[0].value;
2378 delete_insns_since (last);
2379 CALL_EXPR_ARG (exp, 0) = orig_arg;
2382 return NULL_RTX;
2385 /* Expand a call to the builtin sincos math function.
2386 Return NULL_RTX if a normal call should be emitted rather than expanding the
2387 function in-line. EXP is the expression that is a call to the builtin
2388 function. */
2390 static rtx
2391 expand_builtin_sincos (tree exp)
2393 rtx op0, op1, op2, target1, target2;
2394 machine_mode mode;
2395 tree arg, sinp, cosp;
2396 int result;
2397 location_t loc = EXPR_LOCATION (exp);
2398 tree alias_type, alias_off;
2400 if (!validate_arglist (exp, REAL_TYPE,
2401 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2402 return NULL_RTX;
2404 arg = CALL_EXPR_ARG (exp, 0);
2405 sinp = CALL_EXPR_ARG (exp, 1);
2406 cosp = CALL_EXPR_ARG (exp, 2);
2408 /* Make a suitable register to place result in. */
2409 mode = TYPE_MODE (TREE_TYPE (arg));
2411 /* Check if sincos insn is available, otherwise emit the call. */
2412 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2413 return NULL_RTX;
2415 target1 = gen_reg_rtx (mode);
2416 target2 = gen_reg_rtx (mode);
2418 op0 = expand_normal (arg);
2419 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2420 alias_off = build_int_cst (alias_type, 0);
2421 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2422 sinp, alias_off));
2423 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2424 cosp, alias_off));
2426 /* Compute into target1 and target2.
2427 Set TARGET to wherever the result comes back. */
2428 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2429 gcc_assert (result);
2431 /* Move target1 and target2 to the memory locations indicated
2432 by op1 and op2. */
2433 emit_move_insn (op1, target1);
2434 emit_move_insn (op2, target2);
2436 return const0_rtx;
2439 /* Expand a call to the internal cexpi builtin to the sincos math function.
2440 EXP is the expression that is a call to the builtin function; if convenient,
2441 the result should be placed in TARGET. */
2443 static rtx
2444 expand_builtin_cexpi (tree exp, rtx target)
2446 tree fndecl = get_callee_fndecl (exp);
2447 tree arg, type;
2448 machine_mode mode;
2449 rtx op0, op1, op2;
2450 location_t loc = EXPR_LOCATION (exp);
2452 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2453 return NULL_RTX;
2455 arg = CALL_EXPR_ARG (exp, 0);
2456 type = TREE_TYPE (arg);
2457 mode = TYPE_MODE (TREE_TYPE (arg));
2459 /* Try expanding via a sincos optab; fall back to emitting a libcall
2460 to sincos or cexp. We are sure one of those exists because cexpi
2461 is only generated from sincos or cexp, or when either is available. */
2462 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2464 op1 = gen_reg_rtx (mode);
2465 op2 = gen_reg_rtx (mode);
2467 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2469 /* Compute into op1 and op2. */
2470 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2472 else if (targetm.libc_has_function (function_sincos))
2474 tree call, fn = NULL_TREE;
2475 tree top1, top2;
2476 rtx op1a, op2a;
2478 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2479 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2480 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2481 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2482 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2483 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2484 else
2485 gcc_unreachable ();
2487 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2488 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2489 op1a = copy_addr_to_reg (XEXP (op1, 0));
2490 op2a = copy_addr_to_reg (XEXP (op2, 0));
2491 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2492 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2494 /* Make sure not to fold the sincos call again. */
2495 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2496 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2497 call, 3, arg, top1, top2));
2499 else
2501 tree call, fn = NULL_TREE, narg;
2502 tree ctype = build_complex_type (type);
2504 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2505 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2506 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2507 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2508 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2509 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2510 else
2511 gcc_unreachable ();
2513 /* If we don't have a decl for cexp, create one. This is the
2514 friendliest fallback if the user calls __builtin_cexpi
2515 on a target without full C99 function support. */
2516 if (fn == NULL_TREE)
2518 tree fntype;
2519 const char *name = NULL;
2521 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2522 name = "cexpf";
2523 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2524 name = "cexp";
2525 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2526 name = "cexpl";
2528 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2529 fn = build_fn_decl (name, fntype);
2532 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2533 build_real (type, dconst0), arg);
2535 /* Make sure not to fold the cexp call again. */
2536 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2537 return expand_expr (build_call_nary (ctype, call, 1, narg),
2538 target, VOIDmode, EXPAND_NORMAL);
2541 /* Now build the proper return type. */
2542 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2543 make_tree (TREE_TYPE (arg), op2),
2544 make_tree (TREE_TYPE (arg), op1)),
2545 target, VOIDmode, EXPAND_NORMAL);
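/* To summarize with an example: __builtin_cexpi (x) is lowered, in
   order of preference, to a sincos optab insn, to a library call
   along the lines of sincos (x, &s, &c), or to cexp (0 + x*i); the
   two parts are then recombined into the complex result
   cos (x) + sin (x) * i as above.  */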
2548 /* Conveniently construct a function call expression. FNDECL names the
2549 function to be called, N is the number of arguments, and the "..."
2550 parameters are the argument expressions. Unlike build_call_expr
2551 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2553 static tree
2554 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2556 va_list ap;
2557 tree fntype = TREE_TYPE (fndecl);
2558 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2560 va_start (ap, n);
2561 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2562 va_end (ap);
2563 SET_EXPR_LOCATION (fn, loc);
2564 return fn;
2567 /* Expand a call to one of the builtin rounding functions gcc defines
2568 as an extension (lfloor and lceil). As these are gcc extensions we
2569 do not need to worry about setting errno to EDOM.
2570 If expanding via optab fails, lower the expression to (int)(floor(x)).
2571 EXP is the expression that is a call to the builtin function;
2572 if convenient, the result should be placed in TARGET. */
2574 static rtx
2575 expand_builtin_int_roundingfn (tree exp, rtx target)
2577 convert_optab builtin_optab;
2578 rtx op0, tmp;
2579 rtx_insn *insns;
2580 tree fndecl = get_callee_fndecl (exp);
2581 enum built_in_function fallback_fn;
2582 tree fallback_fndecl;
2583 machine_mode mode;
2584 tree arg;
2586 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2587 gcc_unreachable ();
2589 arg = CALL_EXPR_ARG (exp, 0);
2591 switch (DECL_FUNCTION_CODE (fndecl))
2593 CASE_FLT_FN (BUILT_IN_ICEIL):
2594 CASE_FLT_FN (BUILT_IN_LCEIL):
2595 CASE_FLT_FN (BUILT_IN_LLCEIL):
2596 builtin_optab = lceil_optab;
2597 fallback_fn = BUILT_IN_CEIL;
2598 break;
2600 CASE_FLT_FN (BUILT_IN_IFLOOR):
2601 CASE_FLT_FN (BUILT_IN_LFLOOR):
2602 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2603 builtin_optab = lfloor_optab;
2604 fallback_fn = BUILT_IN_FLOOR;
2605 break;
2607 default:
2608 gcc_unreachable ();
2611 /* Make a suitable register to place result in. */
2612 mode = TYPE_MODE (TREE_TYPE (exp));
2614 target = gen_reg_rtx (mode);
2616 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2617 need to expand the argument again. This way, we will not perform
2618 side-effects more than once. */
2619 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2621 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2623 start_sequence ();
2625 /* Compute into TARGET. */
2626 if (expand_sfix_optab (target, op0, builtin_optab))
2628 /* Output the entire sequence. */
2629 insns = get_insns ();
2630 end_sequence ();
2631 emit_insn (insns);
2632 return target;
2635 /* If we were unable to expand via the builtin, stop the sequence
2636 (without outputting the insns). */
2637 end_sequence ();
2639 /* Fall back to floating point rounding optab. */
2640 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2642 /* For non-C99 targets we may end up without a fallback fndecl here
2643 if the user called __builtin_lfloor directly. In this case emit
2644 a call to the floor/ceil variants nevertheless. This should result
2645 in the best user experience on targets without full C99 support. */
2646 if (fallback_fndecl == NULL_TREE)
2648 tree fntype;
2649 const char *name = NULL;
2651 switch (DECL_FUNCTION_CODE (fndecl))
2653 case BUILT_IN_ICEIL:
2654 case BUILT_IN_LCEIL:
2655 case BUILT_IN_LLCEIL:
2656 name = "ceil";
2657 break;
2658 case BUILT_IN_ICEILF:
2659 case BUILT_IN_LCEILF:
2660 case BUILT_IN_LLCEILF:
2661 name = "ceilf";
2662 break;
2663 case BUILT_IN_ICEILL:
2664 case BUILT_IN_LCEILL:
2665 case BUILT_IN_LLCEILL:
2666 name = "ceill";
2667 break;
2668 case BUILT_IN_IFLOOR:
2669 case BUILT_IN_LFLOOR:
2670 case BUILT_IN_LLFLOOR:
2671 name = "floor";
2672 break;
2673 case BUILT_IN_IFLOORF:
2674 case BUILT_IN_LFLOORF:
2675 case BUILT_IN_LLFLOORF:
2676 name = "floorf";
2677 break;
2678 case BUILT_IN_IFLOORL:
2679 case BUILT_IN_LFLOORL:
2680 case BUILT_IN_LLFLOORL:
2681 name = "floorl";
2682 break;
2683 default:
2684 gcc_unreachable ();
2687 fntype = build_function_type_list (TREE_TYPE (arg),
2688 TREE_TYPE (arg), NULL_TREE);
2689 fallback_fndecl = build_fn_decl (name, fntype);
2692 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2694 tmp = expand_normal (exp);
2695 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2697 /* Truncate the result of floating point optab to integer
2698 via expand_fix (). */
2699 target = gen_reg_rtx (mode);
2700 expand_fix (target, tmp, 0);
2702 return target;
2705 /* Expand a call to one of the builtin math functions doing integer
2706 conversion (lrint).
2707 Return 0 if a normal call should be emitted rather than expanding the
2708 function in-line. EXP is the expression that is a call to the builtin
2709 function; if convenient, the result should be placed in TARGET. */
2711 static rtx
2712 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2714 convert_optab builtin_optab;
2715 rtx op0;
2716 rtx_insn *insns;
2717 tree fndecl = get_callee_fndecl (exp);
2718 tree arg;
2719 machine_mode mode;
2720 enum built_in_function fallback_fn = BUILT_IN_NONE;
2722 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2723 gcc_unreachable ();
2725 arg = CALL_EXPR_ARG (exp, 0);
2727 switch (DECL_FUNCTION_CODE (fndecl))
2729 CASE_FLT_FN (BUILT_IN_IRINT):
2730 fallback_fn = BUILT_IN_LRINT;
2731 gcc_fallthrough ();
2732 CASE_FLT_FN (BUILT_IN_LRINT):
2733 CASE_FLT_FN (BUILT_IN_LLRINT):
2734 builtin_optab = lrint_optab;
2735 break;
2737 CASE_FLT_FN (BUILT_IN_IROUND):
2738 fallback_fn = BUILT_IN_LROUND;
2739 gcc_fallthrough ();
2740 CASE_FLT_FN (BUILT_IN_LROUND):
2741 CASE_FLT_FN (BUILT_IN_LLROUND):
2742 builtin_optab = lround_optab;
2743 break;
2745 default:
2746 gcc_unreachable ();
2749 /* There's no easy way to detect the case we need to set EDOM. */
2750 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2751 return NULL_RTX;
2753 /* Make a suitable register to place result in. */
2754 mode = TYPE_MODE (TREE_TYPE (exp));
2756 /* Expand inline only when math functions need not set errno. */
2757 if (!flag_errno_math)
2759 rtx result = gen_reg_rtx (mode);
2761 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2762 need to expand the argument again. This way, we will not perform
2763 side-effects more than once. */
2764 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2766 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2768 start_sequence ();
2770 if (expand_sfix_optab (result, op0, builtin_optab))
2772 /* Output the entire sequence. */
2773 insns = get_insns ();
2774 end_sequence ();
2775 emit_insn (insns);
2776 return result;
2779 /* If we were unable to expand via the builtin, stop the sequence
2780 (without outputting the insns) and call the library function
2781 with the stabilized argument list. */
2782 end_sequence ();
2785 if (fallback_fn != BUILT_IN_NONE)
2787 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2788 targets, (int) round (x) should never be transformed into
2789 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2790 a call to lround in the hope that the target provides at least some
2791 C99 functions. This should result in the best user experience
2792 on targets without full C99 support. */
2793 tree fallback_fndecl = mathfn_built_in_1
2794 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2796 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2797 fallback_fndecl, 1, arg);
2799 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2800 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2801 return convert_to_mode (mode, target, 0);
2804 return expand_call (exp, target, target == const0_rtx);
2807 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2808 a normal call should be emitted rather than expanding the function
2809 in-line. EXP is the expression that is a call to the builtin
2810 function; if convenient, the result should be placed in TARGET. */
2812 static rtx
2813 expand_builtin_powi (tree exp, rtx target)
2815 tree arg0, arg1;
2816 rtx op0, op1;
2817 machine_mode mode;
2818 machine_mode mode2;
2820 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2821 return NULL_RTX;
2823 arg0 = CALL_EXPR_ARG (exp, 0);
2824 arg1 = CALL_EXPR_ARG (exp, 1);
2825 mode = TYPE_MODE (TREE_TYPE (exp));
2827 /* Emit a libcall to libgcc. */
2829 /* Mode of the 2nd argument must match that of an int. */
2830 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2832 if (target == NULL_RTX)
2833 target = gen_reg_rtx (mode);
2835 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2836 if (GET_MODE (op0) != mode)
2837 op0 = convert_to_mode (mode, op0, 0);
2838 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2839 if (GET_MODE (op1) != mode2)
2840 op1 = convert_to_mode (mode2, op1, 0);
2842 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2843 target, LCT_CONST, mode,
2844 op0, mode, op1, mode2);
2846 return target;
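/* For example, __builtin_powi (x, n) with double X typically becomes
   a libgcc call of the form

     double __powidf2 (double x, int n);

   the exact symbol comes from the powi_optab libfunc table and is
   target- and mode-dependent.  */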
2849 /* Expand expression EXP which is a call to the strlen builtin. Return
2850 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2851 try to get the result in TARGET, if convenient. */
2853 static rtx
2854 expand_builtin_strlen (tree exp, rtx target,
2855 machine_mode target_mode)
2857 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2858 return NULL_RTX;
2860 struct expand_operand ops[4];
2861 rtx pat;
2862 tree len;
2863 tree src = CALL_EXPR_ARG (exp, 0);
2864 rtx src_reg;
2865 rtx_insn *before_strlen;
2866 machine_mode insn_mode;
2867 enum insn_code icode = CODE_FOR_nothing;
2868 unsigned int align;
2870 /* If the length can be computed at compile-time, return it. */
2871 len = c_strlen (src, 0);
2872 if (len)
2873 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2875 /* If the length can be computed at compile-time and is a constant
2876 integer, but there are side-effects in src, evaluate
2877 src for side-effects, then return len.
2878 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2879 can be optimized into: i++; x = 3; */
2880 len = c_strlen (src, 1);
2881 if (len && TREE_CODE (len) == INTEGER_CST)
2883 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2884 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2887 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2889 /* If SRC is not a pointer type, don't do this operation inline. */
2890 if (align == 0)
2891 return NULL_RTX;
2893 /* Bail out if we can't compute strlen in the right mode. */
2894 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2896 icode = optab_handler (strlen_optab, insn_mode);
2897 if (icode != CODE_FOR_nothing)
2898 break;
2900 if (insn_mode == VOIDmode)
2901 return NULL_RTX;
2903 /* Make a place to hold the source address. We will not expand
2904 the actual source until we are sure that the expansion will
2905 not fail -- there are trees that cannot be expanded twice. */
2906 src_reg = gen_reg_rtx (Pmode);
2908 /* Mark the beginning of the strlen sequence so we can emit the
2909 source operand later. */
2910 before_strlen = get_last_insn ();
2912 create_output_operand (&ops[0], target, insn_mode);
2913 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2914 create_integer_operand (&ops[2], 0);
2915 create_integer_operand (&ops[3], align);
2916 if (!maybe_expand_insn (icode, 4, ops))
2917 return NULL_RTX;
2919 /* Check to see if the argument was declared attribute nonstring
2920 and if so, issue a warning since at this point it's not known
2921 to be nul-terminated. */
2922 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2924 /* Now that we are assured of success, expand the source. */
2925 start_sequence ();
2926 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2927 if (pat != src_reg)
2929 #ifdef POINTERS_EXTEND_UNSIGNED
2930 if (GET_MODE (pat) != Pmode)
2931 pat = convert_to_mode (Pmode, pat,
2932 POINTERS_EXTEND_UNSIGNED);
2933 #endif
2934 emit_move_insn (src_reg, pat);
2936 pat = get_insns ();
2937 end_sequence ();
2939 if (before_strlen)
2940 emit_insn_after (pat, before_strlen);
2941 else
2942 emit_insn_before (pat, get_insns ());
2944 /* Return the value in the proper mode for this function. */
2945 if (GET_MODE (ops[0].value) == target_mode)
2946 target = ops[0].value;
2947 else if (target != 0)
2948 convert_move (target, ops[0].value, 0);
2949 else
2950 target = convert_to_mode (target_mode, ops[0].value, 0);
2952 return target;
2955 /* Expand call EXP to the strnlen built-in, returning the result
2956 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
2958 static rtx
2959 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
2961 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2962 return NULL_RTX;
2964 tree src = CALL_EXPR_ARG (exp, 0);
2965 tree bound = CALL_EXPR_ARG (exp, 1);
2967 if (!bound)
2968 return NULL_RTX;
2970 location_t loc = UNKNOWN_LOCATION;
2971 if (EXPR_HAS_LOCATION (exp))
2972 loc = EXPR_LOCATION (exp);
2974 tree maxobjsize = max_object_size ();
2975 tree func = get_callee_fndecl (exp);
2977 tree len = c_strlen (src, 0);
2978 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
2979 so these conversions aren't necessary. */
2980 if (len)
2981 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
2983 if (TREE_CODE (bound) == INTEGER_CST)
2985 if (!TREE_NO_WARNING (exp)
2986 && tree_int_cst_lt (maxobjsize, bound)
2987 && warning_at (loc, OPT_Wstringop_overflow_,
2988 "%K%qD specified bound %E "
2989 "exceeds maximum object size %E",
2990 exp, func, bound, maxobjsize))
2991 TREE_NO_WARNING (exp) = true;
2993 if (!len || TREE_CODE (len) != INTEGER_CST)
2994 return NULL_RTX;
2996 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
2997 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3000 if (TREE_CODE (bound) != SSA_NAME)
3001 return NULL_RTX;
3003 wide_int min, max;
3004 enum value_range_type rng = get_range_info (bound, &min, &max);
3005 if (rng != VR_RANGE)
3006 return NULL_RTX;
3008 if (!TREE_NO_WARNING (exp)
3009 && wi::ltu_p (wi::to_wide (maxobjsize), min)
3010 && warning_at (loc, OPT_Wstringop_overflow_,
3011 "%K%qD specified bound [%wu, %wu] "
3012 "exceeds maximum object size %E",
3013 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3014 TREE_NO_WARNING (exp) = true;
3016 if (!len || TREE_CODE (len) != INTEGER_CST)
3017 return NULL_RTX;
3019 if (wi::gtu_p (min, wi::to_wide (len)))
3020 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3022 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3023 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
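/* Sketch of the range logic above: given

     char a[] = "abc";

   and a bound B whose value range is known to be [8, 16], the call
   strnlen (a, B) expands to the constant 3, because even the smallest
   possible bound exceeds the string length.  */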
3026 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3027 bytes from constant string DATA + OFFSET and return it as target
3028 constant. */
3030 static rtx
3031 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3032 scalar_int_mode mode)
3034 const char *str = (const char *) data;
3036 gcc_assert (offset >= 0
3037 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3038 <= strlen (str) + 1));
3040 return c_readstr (str + offset, mode);
3043 /* LEN specifies the length of the block in a memcpy/memset operation.
3044 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3045 In some cases we can make a very likely guess at the maximum size,
3046 which we then store in PROBABLE_MAX_SIZE. */
3048 static void
3049 determine_block_size (tree len, rtx len_rtx,
3050 unsigned HOST_WIDE_INT *min_size,
3051 unsigned HOST_WIDE_INT *max_size,
3052 unsigned HOST_WIDE_INT *probable_max_size)
3054 if (CONST_INT_P (len_rtx))
3056 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3057 return;
3059 else
3061 wide_int min, max;
3062 enum value_range_type range_type = VR_UNDEFINED;
3064 /* Determine bounds from the type. */
3065 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3066 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3067 else
3068 *min_size = 0;
3069 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3070 *probable_max_size = *max_size
3071 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3072 else
3073 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3075 if (TREE_CODE (len) == SSA_NAME)
3076 range_type = get_range_info (len, &min, &max);
3077 if (range_type == VR_RANGE)
3079 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3080 *min_size = min.to_uhwi ();
3081 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3082 *probable_max_size = *max_size = max.to_uhwi ();
3084 else if (range_type == VR_ANTI_RANGE)
3086 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3087 if (min == 0)
3089 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3090 *min_size = max.to_uhwi () + 1;
3092 /* Code like
3094 int n;
3095 if (n < 100)
3096 memcpy (a, b, n)
3098 produces an anti-range allowing negative values of N. We can
3099 still use that information and guess that N is not negative. */
3101 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3102 *probable_max_size = min.to_uhwi () - 1;
3105 gcc_checking_assert (*max_size <=
3106 (unsigned HOST_WIDE_INT)
3107 GET_MODE_MASK (GET_MODE (len_rtx)));
3110 /* Try to verify that the sizes and lengths of the arguments to a string
3111 manipulation function given by EXP are within valid bounds and that
3112 the operation does not lead to buffer overflow or read past the end.
3113 Arguments other than EXP may be null. When non-null, the arguments
3114 have the following meaning:
3115 DST is the destination of a copy call or NULL otherwise.
3116 SRC is the source of a copy call or NULL otherwise.
3117 DSTWRITE is the number of bytes written into the destination obtained
3118 from the user-supplied size argument to the function (such as in
3119 memcpy (DST, SRC, DSTWRITE) or strncpy (DST, SRC, DSTWRITE)).
3120 MAXREAD is the user-supplied bound on the length of the source sequence
3121 (such as in strncat (D, S, N)). It specifies the upper limit on the number
3122 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3123 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3124 expression EXP is a string function call (as opposed to a memory call
3125 like memcpy). As an exception, SRCSTR can also be an integer denoting
3126 the precomputed size of the source string or object (for functions like
3127 memcpy).
3128 DSTSIZE is the size of the destination object specified by the last
3129 argument to the _chk builtins, typically resulting from the expansion
3130 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3131 DSTSIZE).
3133 When DSTWRITE is null, the number of bytes to write is checked to
3134 verify that it doesn't exceed SIZE_MAX.
3136 If the call is successfully verified as safe return true, otherwise
3137 return false. */
3139 static bool
3140 check_access (tree exp, tree, tree, tree dstwrite,
3141 tree maxread, tree srcstr, tree dstsize)
3143 int opt = OPT_Wstringop_overflow_;
3145 /* The size of the largest object is half the address space, or
3146 PTRDIFF_MAX. (This is way too permissive.) */
3147 tree maxobjsize = max_object_size ();
3149 /* Either the length of the source string for string functions or
3150 the size of the source object for raw memory functions. */
3151 tree slen = NULL_TREE;
3153 tree range[2] = { NULL_TREE, NULL_TREE };
3155 /* Set to true when the exact number of bytes written by a string
3156 function like strcpy is not known and the only thing that is
3157 known is that it must be at least one (for the terminating nul). */
3158 bool at_least_one = false;
3159 if (srcstr)
3161 /* SRCSTR is normally a pointer to string but as a special case
3162 it can be an integer denoting the length of a string. */
3163 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3165 /* Try to determine the range of lengths the source string
3166 refers to. If it can be determined and is less than
3167 the upper bound given by MAXREAD add one to it for
3168 the terminating nul. Otherwise, set it to one for
3169 the same reason, or to MAXREAD as appropriate. */
3170 get_range_strlen (srcstr, range);
3171 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3173 if (maxread && tree_int_cst_le (maxread, range[0]))
3174 range[0] = range[1] = maxread;
3175 else
3176 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3177 range[0], size_one_node);
3179 if (maxread && tree_int_cst_le (maxread, range[1]))
3180 range[1] = maxread;
3181 else if (!integer_all_onesp (range[1]))
3182 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3183 range[1], size_one_node);
3185 slen = range[0];
3187 else
3189 at_least_one = true;
3190 slen = size_one_node;
3193 else
3194 slen = srcstr;
3197 if (!dstwrite && !maxread)
3199 /* When the only available piece of data is the object size
3200 there is nothing to do. */
3201 if (!slen)
3202 return true;
3204 /* Otherwise, when the length of the source sequence is known
3205 (as with strlen), set DSTWRITE to it. */
3206 if (!range[0])
3207 dstwrite = slen;
3210 if (!dstsize)
3211 dstsize = maxobjsize;
3213 if (dstwrite)
3214 get_size_range (dstwrite, range);
3216 tree func = get_callee_fndecl (exp);
3218 /* First check the number of bytes to be written against the maximum
3219 object size. */
3220 if (range[0]
3221 && TREE_CODE (range[0]) == INTEGER_CST
3222 && tree_int_cst_lt (maxobjsize, range[0]))
3224 if (TREE_NO_WARNING (exp))
3225 return false;
3227 location_t loc = tree_nonartificial_location (exp);
3228 loc = expansion_point_location_if_in_system_header (loc);
3230 bool warned;
3231 if (range[0] == range[1])
3232 warned = warning_at (loc, opt,
3233 "%K%qD specified size %E "
3234 "exceeds maximum object size %E",
3235 exp, func, range[0], maxobjsize);
3236 else
3237 warned = warning_at (loc, opt,
3238 "%K%qD specified size between %E and %E "
3239 "exceeds maximum object size %E",
3240 exp, func,
3241 range[0], range[1], maxobjsize);
3242 if (warned)
3243 TREE_NO_WARNING (exp) = true;
3245 return false;
3248 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3249 constant, and in range of unsigned HOST_WIDE_INT. */
3250 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3252 /* Next check the number of bytes to be written against the destination
3253 object size. */
3254 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3256 if (range[0]
3257 && TREE_CODE (range[0]) == INTEGER_CST
3258 && ((tree_fits_uhwi_p (dstsize)
3259 && tree_int_cst_lt (dstsize, range[0]))
3260 || (dstwrite
3261 && tree_fits_uhwi_p (dstwrite)
3262 && tree_int_cst_lt (dstwrite, range[0]))))
3264 if (TREE_NO_WARNING (exp))
3265 return false;
3267 location_t loc = tree_nonartificial_location (exp);
3268 loc = expansion_point_location_if_in_system_header (loc);
3270 if (dstwrite == slen && at_least_one)
3272 /* This is a call to strcpy with a destination of size 0
3273 and a source of unknown length. The call will write
3274 at least one byte past the end of the destination. */
3275 warning_at (loc, opt,
3276 "%K%qD writing %E or more bytes into a region "
3277 "of size %E overflows the destination",
3278 exp, func, range[0], dstsize);
3280 else if (tree_int_cst_equal (range[0], range[1]))
3281 warning_n (loc, opt, tree_to_uhwi (range[0]),
3282 "%K%qD writing %E byte into a region "
3283 "of size %E overflows the destination",
3284 "%K%qD writing %E bytes into a region "
3285 "of size %E overflows the destination",
3286 exp, func, range[0], dstsize);
3287 else if (tree_int_cst_sign_bit (range[1]))
3289 /* Avoid printing the upper bound if it's invalid. */
3290 warning_at (loc, opt,
3291 "%K%qD writing %E or more bytes into a region "
3292 "of size %E overflows the destination",
3293 exp, func, range[0], dstsize);
3295 else
3296 warning_at (loc, opt,
3297 "%K%qD writing between %E and %E bytes into "
3298 "a region of size %E overflows the destination",
3299 exp, func, range[0], range[1],
3300 dstsize);
3302 /* Return false when an overflow has been detected. */
3303 return false;
3307 /* Check the maximum length of the source sequence against the size
3308 of the destination object if known, or against the maximum size
3309 of an object. */
3310 if (maxread)
3312 get_size_range (maxread, range);
3314 /* Use the lower end for MAXREAD from now on. */
3315 if (range[0])
3316 maxread = range[0];
3318 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3320 location_t loc = tree_nonartificial_location (exp);
3321 loc = expansion_point_location_if_in_system_header (loc);
3323 if (tree_int_cst_lt (maxobjsize, range[0]))
3325 if (TREE_NO_WARNING (exp))
3326 return false;
3328 /* Warn about crazy big sizes first since that's more
3329 likely to be meaningful than saying that the bound
3330 is greater than the object size if both are big. */
3331 if (range[0] == range[1])
3332 warning_at (loc, opt,
3333 "%K%qD specified bound %E "
3334 "exceeds maximum object size %E",
3335 exp, func,
3336 range[0], maxobjsize);
3337 else
3338 warning_at (loc, opt,
3339 "%K%qD specified bound between %E and %E "
3340 "exceeds maximum object size %E",
3341 exp, func,
3342 range[0], range[1], maxobjsize);
3344 return false;
3347 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3349 if (TREE_NO_WARNING (exp))
3350 return false;
3352 if (tree_int_cst_equal (range[0], range[1]))
3353 warning_at (loc, opt,
3354 "%K%qD specified bound %E "
3355 "exceeds destination size %E",
3356 exp, func,
3357 range[0], dstsize);
3358 else
3359 warning_at (loc, opt,
3360 "%K%qD specified bound between %E and %E "
3361 "exceeds destination size %E",
3362 exp, func,
3363 range[0], range[1], dstsize);
3364 return false;
3369 /* Check for reading past the end of SRC. */
3370 if (slen
3371 && slen == srcstr
3372 && dstwrite && range[0]
3373 && tree_int_cst_lt (slen, range[0]))
3375 if (TREE_NO_WARNING (exp))
3376 return false;
3378 location_t loc = tree_nonartificial_location (exp);
3380 if (tree_int_cst_equal (range[0], range[1]))
3381 warning_n (loc, opt, tree_to_uhwi (range[0]),
3382 "%K%qD reading %E byte from a region of size %E",
3383 "%K%qD reading %E bytes from a region of size %E",
3384 exp, func, range[0], slen);
3385 else if (tree_int_cst_sign_bit (range[1]))
3387 /* Avoid printing the upper bound if it's invalid. */
3388 warning_at (loc, opt,
3389 "%K%qD reading %E or more bytes from a region "
3390 "of size %E",
3391 exp, func, range[0], slen);
3393 else
3394 warning_at (loc, opt,
3395 "%K%qD reading between %E and %E bytes from a region "
3396 "of size %E",
3397 exp, func, range[0], range[1], slen);
3398 return false;
3401 return true;
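/* For instance (a sketch), a call such as

     char d[3];
     strcpy (d, "abcd");

   reaches this routine with DSTSIZE == 3 and a write range of [5, 5]
   (four characters plus the terminating nul), and is diagnosed by the
   "writing 5 bytes into a region of size 3" warning above.  */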
3404 /* Helper to compute the size of the object referenced by the DEST
3405 expression which must have pointer type, using Object Size type
3406 OSTYPE (only the least significant 2 bits are used). Return
3407 an estimate of the size of the object if successful or NULL when
3408 the size cannot be determined. When the referenced object involves
3409 a non-constant offset in some range the returned value represents
3410 the largest size given the smallest non-negative offset in the
3411 range. The function is intended for diagnostics and should not
3412 be used to influence code generation or optimization. */
3414 tree
3415 compute_objsize (tree dest, int ostype)
3417 unsigned HOST_WIDE_INT size;
3419 /* Only the two least significant bits are meaningful. */
3420 ostype &= 3;
3422 if (compute_builtin_object_size (dest, ostype, &size))
3423 return build_int_cst (sizetype, size);
3425 if (TREE_CODE (dest) == SSA_NAME)
3427 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3428 if (!is_gimple_assign (stmt))
3429 return NULL_TREE;
3431 dest = gimple_assign_rhs1 (stmt);
3433 tree_code code = gimple_assign_rhs_code (stmt);
3434 if (code == POINTER_PLUS_EXPR)
3436 /* compute_builtin_object_size fails for addresses with
3437 non-constant offsets. Try to determine the range of
3438 such an offset here and use it to adjust the constant
3439 size. */
3440 tree off = gimple_assign_rhs2 (stmt);
3441 if (TREE_CODE (off) == INTEGER_CST)
3443 if (tree size = compute_objsize (dest, ostype))
3445 wide_int wioff = wi::to_wide (off);
3446 wide_int wisiz = wi::to_wide (size);
3448 /* Ignore negative offsets for now. For others,
3449 use the lower bound as the most optimistic
3450 estimate of the (remaining) size. */
3451 if (wi::sign_mask (wioff))
3453 else if (wi::ltu_p (wioff, wisiz))
3454 return wide_int_to_tree (TREE_TYPE (size),
3455 wi::sub (wisiz, wioff));
3456 else
3457 return size_zero_node;
3460 else if (TREE_CODE (off) == SSA_NAME
3461 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3463 wide_int min, max;
3464 enum value_range_type rng = get_range_info (off, &min, &max);
3466 if (rng == VR_RANGE)
3468 if (tree size = compute_objsize (dest, ostype))
3470 wide_int wisiz = wi::to_wide (size);
3472 /* Ignore negative offsets for now. For others,
3473 use the lower bound as the most optimistic
3474 estimate of the (remaining) size. */
3475 if (wi::sign_mask (min))
3477 else if (wi::ltu_p (min, wisiz))
3478 return wide_int_to_tree (TREE_TYPE (size),
3479 wi::sub (wisiz, min));
3480 else
3481 return size_zero_node;
3486 else if (code != ADDR_EXPR)
3487 return NULL_TREE;
3490 /* Unless computing the largest size (for memcpy and other raw memory
3491 functions), try to determine the size of the object from its type. */
3492 if (!ostype)
3493 return NULL_TREE;
3495 if (TREE_CODE (dest) != ADDR_EXPR)
3496 return NULL_TREE;
3498 tree type = TREE_TYPE (dest);
3499 if (TREE_CODE (type) == POINTER_TYPE)
3500 type = TREE_TYPE (type);
3502 type = TYPE_MAIN_VARIANT (type);
3504 if (TREE_CODE (type) == ARRAY_TYPE
3505 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3507 /* Return the constant size unless it's zero (that's a zero-length
3508 array likely at the end of a struct). */
3509 tree size = TYPE_SIZE_UNIT (type);
3510 if (size && TREE_CODE (size) == INTEGER_CST
3511 && !integer_zerop (size))
3512 return size;
3515 return NULL_TREE;
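
The Object Size data this helper builds on is visible at the source level through __builtin_object_size, which compute_builtin_object_size implements. A minimal user-level sketch of the semantics approximated above, including the constant-offset adjustment (illustration only, not part of this file; results assume GCC with its object-size folding active):

    #include <assert.h>

    int
    main (void)
    {
      char buf[16];

      /* The whole object is visible from its start...  */
      assert (__builtin_object_size (buf, 0) == 16);
      /* ...and a constant offset shrinks the remaining size, which is
         what the POINTER_PLUS_EXPR handling above computes.  */
      assert (__builtin_object_size (buf + 4, 0) == 12);
      return 0;
    }
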
3518 /* Helper to determine and check the sizes of the source and the destination
3519 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3520 call expression, DEST is the destination argument, SRC is the source
3521 argument or null, and LEN is the number of bytes. Use Object Size type-0
3522 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3523 (no overflow or invalid sizes), false otherwise. */
3525 static bool
3526 check_memop_access (tree exp, tree dest, tree src, tree size)
3528 /* For functions like memset and memcpy that operate on raw memory
3529 try to determine the size of the largest source and destination
3530 object using type-0 Object Size regardless of the object size
3531 type specified by the option. */
3532 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3533 tree dstsize = compute_objsize (dest, 0);
3535 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3536 srcsize, dstsize);
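
As a user-level illustration of the calls this helper diagnoses (not from this file; the diagnostic wording is paraphrased, and -Wstringop-overflow is enabled by default):

    #include <string.h>

    char dst[4];

    void
    f (const char *src)
    {
      /* Writes 8 bytes into a 4-byte object: check_access reports it,
         roughly "writing 8 bytes into a region of size 4".  */
      memcpy (dst, src, 8);
    }
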
3539 /* Validate memchr arguments without performing any expansion.
3540 Return NULL_RTX. */
3542 static rtx
3543 expand_builtin_memchr (tree exp, rtx)
3545 if (!validate_arglist (exp,
3546 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3547 return NULL_RTX;
3549 tree arg1 = CALL_EXPR_ARG (exp, 0);
3550 tree len = CALL_EXPR_ARG (exp, 2);
3552 /* Diagnose calls where the specified length exceeds the size
3553 of the object. */
3554 if (warn_stringop_overflow)
3556 tree size = compute_objsize (arg1, 0);
3557 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3558 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3561 return NULL_RTX;
3564 /* Expand a call EXP to the memcpy builtin.
3565 Return NULL_RTX if we failed, the caller should emit a normal call,
3566 otherwise try to get the result in TARGET, if convenient (and in
3567 mode MODE if that's convenient). */
3569 static rtx
3570 expand_builtin_memcpy (tree exp, rtx target)
3572 if (!validate_arglist (exp,
3573 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3574 return NULL_RTX;
3576 tree dest = CALL_EXPR_ARG (exp, 0);
3577 tree src = CALL_EXPR_ARG (exp, 1);
3578 tree len = CALL_EXPR_ARG (exp, 2);
3580 check_memop_access (exp, dest, src, len);
3582 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3583 /*endp=*/ 0);
3586 /* Check a call EXP to the memmove built-in for validity.
3587 Return NULL_RTX on both success and failure. */
3589 static rtx
3590 expand_builtin_memmove (tree exp, rtx)
3592 if (!validate_arglist (exp,
3593 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3594 return NULL_RTX;
3596 tree dest = CALL_EXPR_ARG (exp, 0);
3597 tree src = CALL_EXPR_ARG (exp, 1);
3598 tree len = CALL_EXPR_ARG (exp, 2);
3600 check_memop_access (exp, dest, src, len);
3602 return NULL_RTX;
3605 /* Expand a call EXP to the mempcpy builtin.
3606 Return NULL_RTX if we failed; the caller should emit a normal call,
3607 otherwise try to get the result in TARGET, if convenient (and in
3608 mode MODE if that's convenient). If ENDP is 0 return the
3609 destination pointer, if ENDP is 1 return the end pointer ala
3610 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3611 stpcpy. */
3613 static rtx
3614 expand_builtin_mempcpy (tree exp, rtx target)
3616 if (!validate_arglist (exp,
3617 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3618 return NULL_RTX;
3620 tree dest = CALL_EXPR_ARG (exp, 0);
3621 tree src = CALL_EXPR_ARG (exp, 1);
3622 tree len = CALL_EXPR_ARG (exp, 2);
3624 /* Policy does not generally allow using compute_objsize (which
3625 is used internally by check_memop_access) to change code generation
3626 or drive optimization decisions.
3628 In this instance it is safe because the code we generate has
3629 the same semantics regardless of the return value of
3630 check_memop_access. Exactly the same amount of data is copied
3631 and the return value is exactly the same in both cases.
3633 Furthermore, check_memop_access always uses mode 0 for the call to
3634 compute_objsize, so the imprecise nature of compute_objsize is
3635 avoided. */
3637 /* Avoid expanding mempcpy into memcpy when the call is determined
3638 to overflow the buffer. This also prevents the same overflow
3639 from being diagnosed again when expanding memcpy. */
3640 if (!check_memop_access (exp, dest, src, len))
3641 return NULL_RTX;
3643 return expand_builtin_mempcpy_args (dest, src, len,
3644 target, exp, /*endp=*/ 1);
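
The ENDP encoding used here and in the helpers below mirrors the return-value conventions of the corresponding library functions. A hedged user-level sketch (mempcpy is a GNU extension, hence the feature-test macro):

    #define _GNU_SOURCE
    #include <string.h>

    void
    demo (char *d, const char *s, size_t n)
    {
      char *a = memcpy (d, s, n);   /* ENDP == 0: returns D.  */
      char *b = mempcpy (d, s, n);  /* ENDP == 1: returns D + N.  */
      char *c = stpcpy (d, s);      /* ENDP == 2: returns the end pointer
                                       minus one, i.e. D + strlen (S),
                                       addressing the NUL just written.  */
      (void) a; (void) b; (void) c;
    }
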
3647 /* Helper function to do the actual work for expand of memory copy family
3648 functions (memcpy, mempcpy, stpcpy). Expansion should copy LEN bytes
3649 of memory from SRC to DEST and assign the result to TARGET if convenient.
3650 If ENDP is 0 return the
3651 destination pointer, if ENDP is 1 return the end pointer ala
3652 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3653 stpcpy. */
3655 static rtx
3656 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3657 rtx target, tree exp, int endp)
3659 const char *src_str;
3660 unsigned int src_align = get_pointer_alignment (src);
3661 unsigned int dest_align = get_pointer_alignment (dest);
3662 rtx dest_mem, src_mem, dest_addr, len_rtx;
3663 HOST_WIDE_INT expected_size = -1;
3664 unsigned int expected_align = 0;
3665 unsigned HOST_WIDE_INT min_size;
3666 unsigned HOST_WIDE_INT max_size;
3667 unsigned HOST_WIDE_INT probable_max_size;
3669 /* If DEST is not a pointer type, call the normal function. */
3670 if (dest_align == 0)
3671 return NULL_RTX;
3673 /* If SRC is not a pointer type, don't do this
3674 operation in-line. */
3675 if (src_align == 0)
3676 return NULL_RTX;
3678 if (currently_expanding_gimple_stmt)
3679 stringop_block_profile (currently_expanding_gimple_stmt,
3680 &expected_align, &expected_size);
3682 if (expected_align < dest_align)
3683 expected_align = dest_align;
3684 dest_mem = get_memory_rtx (dest, len);
3685 set_mem_align (dest_mem, dest_align);
3686 len_rtx = expand_normal (len);
3687 determine_block_size (len, len_rtx, &min_size, &max_size,
3688 &probable_max_size);
3689 src_str = c_getstr (src);
3691 /* If SRC is a string constant and block move would be done
3692 by pieces, we can avoid loading the string from memory
3693 and need only store the computed constants. */
3694 if (src_str
3695 && CONST_INT_P (len_rtx)
3696 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3697 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3698 CONST_CAST (char *, src_str),
3699 dest_align, false))
3701 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3702 builtin_memcpy_read_str,
3703 CONST_CAST (char *, src_str),
3704 dest_align, false, endp);
3705 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3706 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3707 return dest_mem;
3710 src_mem = get_memory_rtx (src, len);
3711 set_mem_align (src_mem, src_align);
3713 /* Copy word part most expediently. */
3714 enum block_op_methods method = BLOCK_OP_NORMAL;
3715 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3716 method = BLOCK_OP_TAILCALL;
3717 if (endp == 1 && target != const0_rtx)
3718 method = BLOCK_OP_NO_LIBCALL_RET;
3719 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3720 expected_align, expected_size,
3721 min_size, max_size, probable_max_size);
3722 if (dest_addr == pc_rtx)
3723 return NULL_RTX;
3725 if (dest_addr == 0)
3727 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3728 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3731 if (endp && target != const0_rtx)
3733 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3734 /* For stpcpy, point at the last byte written (the NUL). */
3735 if (endp == 2)
3736 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3739 return dest_addr;
3742 static rtx
3743 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3744 rtx target, tree orig_exp, int endp)
3746 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3747 endp);
3750 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3751 we failed; the caller should emit a normal call. Otherwise try to
3752 get the result in TARGET, if convenient. If ENDP is 0 return the
3753 destination pointer, if ENDP is 1 return the end pointer ala
3754 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3755 stpcpy. */
3757 static rtx
3758 expand_movstr (tree dest, tree src, rtx target, int endp)
3760 struct expand_operand ops[3];
3761 rtx dest_mem;
3762 rtx src_mem;
3764 if (!targetm.have_movstr ())
3765 return NULL_RTX;
3767 dest_mem = get_memory_rtx (dest, NULL);
3768 src_mem = get_memory_rtx (src, NULL);
3769 if (!endp)
3771 target = force_reg (Pmode, XEXP (dest_mem, 0));
3772 dest_mem = replace_equiv_address (dest_mem, target);
3775 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3776 create_fixed_operand (&ops[1], dest_mem);
3777 create_fixed_operand (&ops[2], src_mem);
3778 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3779 return NULL_RTX;
3781 if (endp && target != const0_rtx)
3783 target = ops[0].value;
3784 /* movstr is supposed to set end to the address of the NUL
3785 terminator. If the caller requested a mempcpy-like return value,
3786 adjust it. */
3787 if (endp == 1)
3789 rtx tem = plus_constant (GET_MODE (target),
3790 gen_lowpart (GET_MODE (target), target), 1);
3791 emit_move_insn (target, force_operand (tem, NULL_RTX));
3794 return target;
3797 /* Do some very basic size validation of a call to the strcat builtin
3798 given by EXP. Return NULL_RTX to have the built-in expand to a call
3799 to the library function. */
3801 static rtx
3802 expand_builtin_strcat (tree exp, rtx)
3804 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3805 || !warn_stringop_overflow)
3806 return NULL_RTX;
3808 tree dest = CALL_EXPR_ARG (exp, 0);
3809 tree src = CALL_EXPR_ARG (exp, 1);
3811 /* There is no way here to determine the length of the string in
3812 the destination to which the SRC string is being appended so
3813 just diagnose cases when the source string is longer than
3814 the destination object. */
3816 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3818 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3819 destsize);
3821 return NULL_RTX;
3824 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3825 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3826 try to get the result in TARGET, if convenient (and in mode MODE if that's
3827 convenient). */
3829 static rtx
3830 expand_builtin_strcpy (tree exp, rtx target)
3832 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3833 return NULL_RTX;
3835 tree dest = CALL_EXPR_ARG (exp, 0);
3836 tree src = CALL_EXPR_ARG (exp, 1);
3838 if (warn_stringop_overflow)
3840 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3841 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3842 src, destsize);
3845 if (rtx ret = expand_builtin_strcpy_args (dest, src, target))
3847 /* Check to see if the argument was declared attribute nonstring
3848 and if so, issue a warning since at this point it's not known
3849 to be nul-terminated. */
3850 tree fndecl = get_callee_fndecl (exp);
3851 maybe_warn_nonstring_arg (fndecl, exp);
3852 return ret;
3855 return NULL_RTX;
3858 /* Helper function to do the actual work for expand_builtin_strcpy. The
3859 arguments to the builtin_strcpy call DEST and SRC are broken out
3860 so that this can also be called without constructing an actual CALL_EXPR.
3861 The other arguments and return value are the same as for
3862 expand_builtin_strcpy. */
3864 static rtx
3865 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3867 return expand_movstr (dest, src, target, /*endp=*/0);
3870 /* Expand a call EXP to the stpcpy builtin.
3871 Return NULL_RTX if we failed; the caller should emit a normal call.
3872 Otherwise try to get the result in TARGET, if convenient (and in
3873 mode MODE if that's convenient). */
3875 static rtx
3876 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3878 tree dst, src;
3879 location_t loc = EXPR_LOCATION (exp);
3881 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3882 return NULL_RTX;
3884 dst = CALL_EXPR_ARG (exp, 0);
3885 src = CALL_EXPR_ARG (exp, 1);
3887 if (warn_stringop_overflow)
3889 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3890 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3891 src, destsize);
3894 /* If return value is ignored, transform stpcpy into strcpy. */
3895 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3897 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3898 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3899 return expand_expr (result, target, mode, EXPAND_NORMAL);
3901 else
3903 tree len, lenp1;
3904 rtx ret;
3906 /* Ensure we get an actual string whose length can be evaluated at
3907 compile-time, not an expression containing a string. This is
3908 because the latter will potentially produce pessimized code
3909 when used to produce the return value. */
3910 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3911 return expand_movstr (dst, src, target, /*endp=*/2);
3913 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3914 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3915 target, exp, /*endp=*/2);
3917 if (ret)
3918 return ret;
3920 if (TREE_CODE (len) == INTEGER_CST)
3922 rtx len_rtx = expand_normal (len);
3924 if (CONST_INT_P (len_rtx))
3926 ret = expand_builtin_strcpy_args (dst, src, target);
3928 if (ret)
3930 if (! target)
3932 if (mode != VOIDmode)
3933 target = gen_reg_rtx (mode);
3934 else
3935 target = gen_reg_rtx (GET_MODE (ret));
3937 if (GET_MODE (target) != GET_MODE (ret))
3938 ret = gen_lowpart (GET_MODE (target), ret);
3940 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3941 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3942 gcc_assert (ret);
3944 return target;
3949 return expand_movstr (dst, src, target, /*endp=*/2);
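
Two source-level consequences of the logic above, as a sketch (user code, not from this file):

    #include <string.h>

    void
    copy_no_result (char *d, const char *s)
    {
      /* The result is unused (TARGET == const0_rtx), so this call is
         rewritten as strcpy (d, s).  */
      stpcpy (d, s);
    }

    char *
    copy_known_source (char *d)
    {
      /* The source length is a compile-time constant, so this behaves
         as mempcpy (d, "abc", 4) with the result adjusted back by one:
         the returned pointer is d + 3, addressing the NUL.  */
      return stpcpy (d, "abc");
    }
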
3953 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3954 arguments while being careful to avoid duplicate warnings (which could
3955 be issued if the expander were to expand the call, resulting in it
3956 being emitted in expand_call ()). */
3958 static rtx
3959 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3961 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3963 /* The call has been successfully expanded. Check for nonstring
3964 arguments and issue warnings as appropriate. */
3965 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3966 return ret;
3969 return NULL_RTX;
3972 /* Check a call EXP to the stpncpy built-in for validity.
3973 Return NULL_RTX on both success and failure. */
3975 static rtx
3976 expand_builtin_stpncpy (tree exp, rtx)
3978 if (!validate_arglist (exp,
3979 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3980 || !warn_stringop_overflow)
3981 return NULL_RTX;
3983 /* The source and destination of the call. */
3984 tree dest = CALL_EXPR_ARG (exp, 0);
3985 tree src = CALL_EXPR_ARG (exp, 1);
3987 /* The exact number of bytes to write (not the maximum). */
3988 tree len = CALL_EXPR_ARG (exp, 2);
3990 /* The size of the destination object. */
3991 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3993 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3995 return NULL_RTX;
3998 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3999 bytes from constant string DATA + OFFSET and return it as target
4000 constant. */
4002 static rtx
4003 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4004 scalar_int_mode mode)
4006 const char *str = (const char *) data;
4008 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4009 return const0_rtx;
4011 return c_readstr (str + offset, mode);
4014 /* Helper to check the sizes of sequences and the destination of calls
4015 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4016 success (no overflow or invalid sizes), false otherwise. */
4018 static bool
4019 check_strncat_sizes (tree exp, tree objsize)
4021 tree dest = CALL_EXPR_ARG (exp, 0);
4022 tree src = CALL_EXPR_ARG (exp, 1);
4023 tree maxread = CALL_EXPR_ARG (exp, 2);
4025 /* Try to determine the range of lengths that the source expression
4026 refers to. */
4027 tree lenrange[2];
4028 get_range_strlen (src, lenrange);
4030 /* Try to verify that the destination is big enough for the shortest
4031 string. */
4033 if (!objsize && warn_stringop_overflow)
4035 /* If it hasn't been provided by __strncat_chk, try to determine
4036 the size of the destination object into which the source is
4037 being copied. */
4038 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4041 /* Add one for the terminating nul. */
4042 tree srclen = (lenrange[0]
4043 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4044 size_one_node)
4045 : NULL_TREE);
4047 /* The strncat function copies at most MAXREAD bytes and always appends
4048 the terminating nul so the specified upper bound should never be equal
4049 to (or greater than) the size of the destination. */
4050 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4051 && tree_int_cst_equal (objsize, maxread))
4053 location_t loc = tree_nonartificial_location (exp);
4054 loc = expansion_point_location_if_in_system_header (loc);
4056 warning_at (loc, OPT_Wstringop_overflow_,
4057 "%K%qD specified bound %E equals destination size",
4058 exp, get_callee_fndecl (exp), maxread);
4060 return false;
4063 if (!srclen
4064 || (maxread && tree_fits_uhwi_p (maxread)
4065 && tree_fits_uhwi_p (srclen)
4066 && tree_int_cst_lt (maxread, srclen)))
4067 srclen = maxread;
4069 /* The number of bytes to write is LEN but check_access will also
4070 check SRCLEN if LEN's value isn't known. */
4071 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4072 objsize);
4075 /* Similar to expand_builtin_strcat, do some very basic size validation
4076 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4077 the built-in expand to a call to the library function. */
4079 static rtx
4080 expand_builtin_strncat (tree exp, rtx)
4082 if (!validate_arglist (exp,
4083 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4084 || !warn_stringop_overflow)
4085 return NULL_RTX;
4087 tree dest = CALL_EXPR_ARG (exp, 0);
4088 tree src = CALL_EXPR_ARG (exp, 1);
4089 /* The upper bound on the number of bytes to write. */
4090 tree maxread = CALL_EXPR_ARG (exp, 2);
4091 /* The length of the source sequence. */
4092 tree slen = c_strlen (src, 1);
4094 /* Try to determine the range of lengths that the source expression
4095 refers to. */
4096 tree lenrange[2];
4097 if (slen)
4098 lenrange[0] = lenrange[1] = slen;
4099 else
4100 get_range_strlen (src, lenrange);
4102 /* Try to verify that the destination is big enough for the shortest
4103 string. First try to determine the size of the destination object
4104 into which the source is being copied. */
4105 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4107 /* Add one for the terminating nul. */
4108 tree srclen = (lenrange[0]
4109 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4110 size_one_node)
4111 : NULL_TREE);
4113 /* The strncat function copies at most MAXREAD bytes and always appends
4114 the terminating nul so the specified upper bound should never be equal
4115 to (or greater than) the size of the destination. */
4116 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4117 && tree_int_cst_equal (destsize, maxread))
4119 location_t loc = tree_nonartificial_location (exp);
4120 loc = expansion_point_location_if_in_system_header (loc);
4122 warning_at (loc, OPT_Wstringop_overflow_,
4123 "%K%qD specified bound %E equals destination size",
4124 exp, get_callee_fndecl (exp), maxread);
4126 return NULL_RTX;
4129 if (!srclen
4130 || (maxread && tree_fits_uhwi_p (maxread)
4131 && tree_fits_uhwi_p (srclen)
4132 && tree_int_cst_lt (maxread, srclen)))
4133 srclen = maxread;
4135 /* The number of bytes to write is SRCLEN. */
4136 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4138 return NULL_RTX;
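
A sketch of the pattern this warning targets (user code, not from this file; the diagnostic text follows the warning_at call above):

    #include <string.h>

    char d[8];

    void
    f (const char *s)
    {
      /* strncat appends at most 8 bytes of S *plus* the terminating
         NUL, so a bound equal to sizeof d can overflow by one byte;
         this draws "specified bound 8 equals destination size".  */
      strncat (d, s, sizeof d);
    }
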
4141 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4142 NULL_RTX if we failed; the caller should emit a normal call. */
4144 static rtx
4145 expand_builtin_strncpy (tree exp, rtx target)
4147 location_t loc = EXPR_LOCATION (exp);
4149 if (validate_arglist (exp,
4150 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4152 tree dest = CALL_EXPR_ARG (exp, 0);
4153 tree src = CALL_EXPR_ARG (exp, 1);
4154 /* The number of bytes to write (not the maximum). */
4155 tree len = CALL_EXPR_ARG (exp, 2);
4156 /* The length of the source sequence. */
4157 tree slen = c_strlen (src, 1);
4159 if (warn_stringop_overflow)
4161 tree destsize = compute_objsize (dest,
4162 warn_stringop_overflow - 1);
4164 /* The number of bytes to write is LEN but check_access will also
4165 check SLEN if LEN's value isn't known. */
4166 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4167 destsize);
4170 /* We must be passed a constant len and src parameter. */
4171 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4172 return NULL_RTX;
4174 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4176 /* We're required to pad with trailing zeros if the requested
4177 len is greater than strlen(s2)+1. In that case try to
4178 use store_by_pieces; if that fails, punt.
4179 if (tree_int_cst_lt (slen, len))
4181 unsigned int dest_align = get_pointer_alignment (dest);
4182 const char *p = c_getstr (src);
4183 rtx dest_mem;
4185 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4186 || !can_store_by_pieces (tree_to_uhwi (len),
4187 builtin_strncpy_read_str,
4188 CONST_CAST (char *, p),
4189 dest_align, false))
4190 return NULL_RTX;
4192 dest_mem = get_memory_rtx (dest, len);
4193 store_by_pieces (dest_mem, tree_to_uhwi (len),
4194 builtin_strncpy_read_str,
4195 CONST_CAST (char *, p), dest_align, false, 0);
4196 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4197 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4198 return dest_mem;
4201 return NULL_RTX;
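
The zero-padding rule handled above is the C library's: when LEN exceeds strlen (SRC) + 1, the rest of the destination must be zero-filled. Illustration (user code, not from this file):

    #include <assert.h>
    #include <string.h>

    int
    main (void)
    {
      char buf[8];

      memset (buf, 'x', sizeof buf);
      /* strlen ("ab") + 1 == 3 < 8, so the trailing 5 bytes are
         zero-filled; with a constant source and length the expander
         does this via store_by_pieces.  */
      strncpy (buf, "ab", sizeof buf);
      assert (buf[2] == '\0' && buf[7] == '\0');
      return 0;
    }
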
4204 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4205 bytes from constant string DATA + OFFSET and return it as target
4206 constant. */
4208 static rtx
4209 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4210 scalar_int_mode mode)
4212 const char *c = (const char *) data;
4213 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4215 memset (p, *c, GET_MODE_SIZE (mode));
4217 return c_readstr (p, mode);
4220 /* Callback routine for store_by_pieces. Return the RTL of a register
4221 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4222 char value given in the RTL register data. For example, if mode is
4223 4 bytes wide, return the RTL for 0x01010101*data. */
4225 static rtx
4226 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4227 scalar_int_mode mode)
4229 rtx target, coeff;
4230 size_t size;
4231 char *p;
4233 size = GET_MODE_SIZE (mode);
4234 if (size == 1)
4235 return (rtx) data;
4237 p = XALLOCAVEC (char, size);
4238 memset (p, 1, size);
4239 coeff = c_readstr (p, mode);
4241 target = convert_to_mode (mode, (rtx) data, 1);
4242 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4243 return force_reg (mode, target);
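
The replication trick builtin_memset_gen_str arranges in RTL can be written directly in C; a self-contained sketch of the same arithmetic (illustration, not part of this file):

    #include <assert.h>
    #include <stdint.h>

    /* Replicate the byte C into every byte of a 32-bit word: multiply
       the zero-extended byte by a word whose every byte is 1, exactly
       the coefficient built from the memset (p, 1, size) pattern.  */
    static uint32_t
    replicate_byte (unsigned char c)
    {
      return (uint32_t) c * UINT32_C (0x01010101);
    }

    int
    main (void)
    {
      assert (replicate_byte (0xAB) == UINT32_C (0xABABABAB));
      assert (replicate_byte (0x00) == 0);
      return 0;
    }
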
4246 /* Expand expression EXP, which is a call to the memset builtin. Return
4247 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4248 try to get the result in TARGET, if convenient (and in mode MODE if that's
4249 convenient). */
4251 static rtx
4252 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4254 if (!validate_arglist (exp,
4255 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4256 return NULL_RTX;
4258 tree dest = CALL_EXPR_ARG (exp, 0);
4259 tree val = CALL_EXPR_ARG (exp, 1);
4260 tree len = CALL_EXPR_ARG (exp, 2);
4262 check_memop_access (exp, dest, NULL_TREE, len);
4264 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4267 /* Helper function to do the actual work for expand_builtin_memset. The
4268 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4269 so that this can also be called without constructing an actual CALL_EXPR.
4270 The other arguments and return value are the same as for
4271 expand_builtin_memset. */
4273 static rtx
4274 expand_builtin_memset_args (tree dest, tree val, tree len,
4275 rtx target, machine_mode mode, tree orig_exp)
4277 tree fndecl, fn;
4278 enum built_in_function fcode;
4279 machine_mode val_mode;
4280 char c;
4281 unsigned int dest_align;
4282 rtx dest_mem, dest_addr, len_rtx;
4283 HOST_WIDE_INT expected_size = -1;
4284 unsigned int expected_align = 0;
4285 unsigned HOST_WIDE_INT min_size;
4286 unsigned HOST_WIDE_INT max_size;
4287 unsigned HOST_WIDE_INT probable_max_size;
4289 dest_align = get_pointer_alignment (dest);
4291 /* If DEST is not a pointer type, don't do this operation in-line. */
4292 if (dest_align == 0)
4293 return NULL_RTX;
4295 if (currently_expanding_gimple_stmt)
4296 stringop_block_profile (currently_expanding_gimple_stmt,
4297 &expected_align, &expected_size);
4299 if (expected_align < dest_align)
4300 expected_align = dest_align;
4302 /* If the LEN parameter is zero, return DEST. */
4303 if (integer_zerop (len))
4305 /* Evaluate and ignore VAL in case it has side-effects. */
4306 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4307 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4310 /* Stabilize the arguments in case we fail. */
4311 dest = builtin_save_expr (dest);
4312 val = builtin_save_expr (val);
4313 len = builtin_save_expr (len);
4315 len_rtx = expand_normal (len);
4316 determine_block_size (len, len_rtx, &min_size, &max_size,
4317 &probable_max_size);
4318 dest_mem = get_memory_rtx (dest, len);
4319 val_mode = TYPE_MODE (unsigned_char_type_node);
4321 if (TREE_CODE (val) != INTEGER_CST)
4323 rtx val_rtx;
4325 val_rtx = expand_normal (val);
4326 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4328 /* Assume that we can memset by pieces if we can store
4329 the coefficients by pieces (in the required modes).
4330 We can't pass builtin_memset_gen_str as that emits RTL. */
4331 c = 1;
4332 if (tree_fits_uhwi_p (len)
4333 && can_store_by_pieces (tree_to_uhwi (len),
4334 builtin_memset_read_str, &c, dest_align,
4335 true))
4337 val_rtx = force_reg (val_mode, val_rtx);
4338 store_by_pieces (dest_mem, tree_to_uhwi (len),
4339 builtin_memset_gen_str, val_rtx, dest_align,
4340 true, 0);
4342 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4343 dest_align, expected_align,
4344 expected_size, min_size, max_size,
4345 probable_max_size))
4346 goto do_libcall;
4348 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4349 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4350 return dest_mem;
4353 if (target_char_cast (val, &c))
4354 goto do_libcall;
4356 if (c)
4358 if (tree_fits_uhwi_p (len)
4359 && can_store_by_pieces (tree_to_uhwi (len),
4360 builtin_memset_read_str, &c, dest_align,
4361 true))
4362 store_by_pieces (dest_mem, tree_to_uhwi (len),
4363 builtin_memset_read_str, &c, dest_align, true, 0);
4364 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4365 gen_int_mode (c, val_mode),
4366 dest_align, expected_align,
4367 expected_size, min_size, max_size,
4368 probable_max_size))
4369 goto do_libcall;
4371 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4372 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4373 return dest_mem;
4376 set_mem_align (dest_mem, dest_align);
4377 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4378 CALL_EXPR_TAILCALL (orig_exp)
4379 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4380 expected_align, expected_size,
4381 min_size, max_size,
4382 probable_max_size);
4384 if (dest_addr == 0)
4386 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4387 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4390 return dest_addr;
4392 do_libcall:
4393 fndecl = get_callee_fndecl (orig_exp);
4394 fcode = DECL_FUNCTION_CODE (fndecl);
4395 if (fcode == BUILT_IN_MEMSET)
4396 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4397 dest, val, len);
4398 else if (fcode == BUILT_IN_BZERO)
4399 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4400 dest, len);
4401 else
4402 gcc_unreachable ();
4403 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4404 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4405 return expand_call (fn, target, target == const0_rtx);
4408 /* Expand expression EXP, which is a call to the bzero builtin. Return
4409 NULL_RTX if we failed; the caller should emit a normal call. */
4411 static rtx
4412 expand_builtin_bzero (tree exp)
4414 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4415 return NULL_RTX;
4417 tree dest = CALL_EXPR_ARG (exp, 0);
4418 tree size = CALL_EXPR_ARG (exp, 1);
4420 check_memop_access (exp, dest, NULL_TREE, size);
4422 /* New argument list transforming bzero(ptr x, int y) to
4423 memset(ptr x, int 0, size_t y). This is done this way
4424 so that if it isn't expanded inline, we fall back to
4425 calling bzero instead of memset. */
4427 location_t loc = EXPR_LOCATION (exp);
4429 return expand_builtin_memset_args (dest, integer_zero_node,
4430 fold_convert_loc (loc,
4431 size_type_node, size),
4432 const0_rtx, VOIDmode, exp);
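
In source terms the argument rewriting above amounts to the following (sketch, not from this file):

    #include <strings.h>

    void
    zero_out (void *p, int n)
    {
      /* Expanded as memset (p, 0, (size_t) n); but if inline expansion
         fails, the emitted library call is still bzero, not memset.  */
      bzero (p, n);
    }
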
4435 /* Try to expand cmpstr operation ICODE with the given operands.
4436 Return the result rtx on success, otherwise return null. */
4438 static rtx
4439 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4440 HOST_WIDE_INT align)
4442 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4444 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4445 target = NULL_RTX;
4447 struct expand_operand ops[4];
4448 create_output_operand (&ops[0], target, insn_mode);
4449 create_fixed_operand (&ops[1], arg1_rtx);
4450 create_fixed_operand (&ops[2], arg2_rtx);
4451 create_integer_operand (&ops[3], align);
4452 if (maybe_expand_insn (icode, 4, ops))
4453 return ops[0].value;
4454 return NULL_RTX;
4457 /* Expand expression EXP, which is a call to the memcmp built-in function.
4458 Return NULL_RTX if we failed and the caller should emit a normal call,
4459 otherwise try to get the result in TARGET, if convenient.
4460 RESULT_EQ is true if we can relax the returned value to be either zero
4461 or nonzero, without caring about the sign. */
4463 static rtx
4464 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4466 if (!validate_arglist (exp,
4467 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4468 return NULL_RTX;
4470 tree arg1 = CALL_EXPR_ARG (exp, 0);
4471 tree arg2 = CALL_EXPR_ARG (exp, 1);
4472 tree len = CALL_EXPR_ARG (exp, 2);
4473 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4474 bool no_overflow = true;
4476 /* Diagnose calls where the specified length exceeds the size of either
4477 object. */
4478 tree size = compute_objsize (arg1, 0);
4479 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4480 len, /*maxread=*/NULL_TREE, size,
4481 /*objsize=*/NULL_TREE);
4482 if (no_overflow)
4484 size = compute_objsize (arg2, 0);
4485 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4486 len, /*maxread=*/NULL_TREE, size,
4487 /*objsize=*/NULL_TREE);
4490 /* If the specified length exceeds the size of either object,
4491 call the function. */
4492 if (!no_overflow)
4493 return NULL_RTX;
4495 /* Due to the performance benefit, always try the inline expansion
4496 first when result_eq is false. */
4497 rtx result = NULL_RTX;
4499 if (!result_eq && fcode != BUILT_IN_BCMP)
4501 result = inline_expand_builtin_string_cmp (exp, target);
4502 if (result)
4503 return result;
4506 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4507 location_t loc = EXPR_LOCATION (exp);
4509 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4510 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4512 /* If we don't have POINTER_TYPE, call the function. */
4513 if (arg1_align == 0 || arg2_align == 0)
4514 return NULL_RTX;
4516 rtx arg1_rtx = get_memory_rtx (arg1, len);
4517 rtx arg2_rtx = get_memory_rtx (arg2, len);
4518 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4520 /* Set MEM_SIZE as appropriate. */
4521 if (CONST_INT_P (len_rtx))
4523 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4524 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4527 by_pieces_constfn constfn = NULL;
4529 const char *src_str = c_getstr (arg2);
4530 if (result_eq && src_str == NULL)
4532 src_str = c_getstr (arg1);
4533 if (src_str != NULL)
4534 std::swap (arg1_rtx, arg2_rtx);
4537 /* If SRC is a string constant and the block comparison would be done
4538 by pieces, we can avoid loading the string from memory
4539 and compare against the computed constants instead. */
4540 if (src_str
4541 && CONST_INT_P (len_rtx)
4542 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4543 constfn = builtin_memcpy_read_str;
4545 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4546 TREE_TYPE (len), target,
4547 result_eq, constfn,
4548 CONST_CAST (char *, src_str));
4550 if (result)
4552 /* Return the value in the proper mode for this function. */
4553 if (GET_MODE (result) == mode)
4554 return result;
4556 if (target != 0)
4558 convert_move (target, result, 0);
4559 return target;
4562 return convert_to_mode (mode, result, 0);
4565 return NULL_RTX;
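
A sketch of the two situations RESULT_EQ distinguishes (user code, not from this file):

    #include <string.h>

    int
    ordered (const void *a, const void *b, size_t n)
    {
      /* The sign of the result is observed, so the full memcmp
         semantics are needed (RESULT_EQ is false).  */
      return memcmp (a, b, n) < 0;
    }

    int
    equal (const void *a, const void *b, size_t n)
    {
      /* Only zero/nonzero is observed, so the expander may use a
         cheaper comparison whose magnitude and sign are arbitrary as
         long as zero-ness is preserved (RESULT_EQ is true).  */
      return memcmp (a, b, n) == 0;
    }
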
4568 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4569 if we failed; the caller should emit a normal call. Otherwise try to get
4570 the result in TARGET, if convenient. */
4572 static rtx
4573 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4575 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4576 return NULL_RTX;
4578 /* Due to the performance benefit, always try the inline expansion first. */
4579 rtx result = NULL_RTX;
4580 result = inline_expand_builtin_string_cmp (exp, target);
4581 if (result)
4582 return result;
4584 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4585 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4586 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4587 return NULL_RTX;
4589 tree arg1 = CALL_EXPR_ARG (exp, 0);
4590 tree arg2 = CALL_EXPR_ARG (exp, 1);
4592 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4593 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4595 /* If we don't have POINTER_TYPE, call the function. */
4596 if (arg1_align == 0 || arg2_align == 0)
4597 return NULL_RTX;
4599 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4600 arg1 = builtin_save_expr (arg1);
4601 arg2 = builtin_save_expr (arg2);
4603 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4604 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4606 /* Try to call cmpstrsi. */
4607 if (cmpstr_icode != CODE_FOR_nothing)
4608 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4609 MIN (arg1_align, arg2_align));
4611 /* Try to determine at least one length and call cmpstrnsi. */
4612 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4614 tree len;
4615 rtx arg3_rtx;
4617 tree len1 = c_strlen (arg1, 1);
4618 tree len2 = c_strlen (arg2, 1);
4620 if (len1)
4621 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4622 if (len2)
4623 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4625 /* If we don't have a constant length for the first, use the length
4626 of the second, if we know it. We don't require a constant for
4627 this case; some cost analysis could be done if both are available
4628 but neither is constant. For now, assume they're equally cheap,
4629 unless one has side effects. If both strings have constant lengths,
4630 use the smaller. */
4632 if (!len1)
4633 len = len2;
4634 else if (!len2)
4635 len = len1;
4636 else if (TREE_SIDE_EFFECTS (len1))
4637 len = len2;
4638 else if (TREE_SIDE_EFFECTS (len2))
4639 len = len1;
4640 else if (TREE_CODE (len1) != INTEGER_CST)
4641 len = len2;
4642 else if (TREE_CODE (len2) != INTEGER_CST)
4643 len = len1;
4644 else if (tree_int_cst_lt (len1, len2))
4645 len = len1;
4646 else
4647 len = len2;
4649 /* If both arguments have side effects, we cannot optimize. */
4650 if (len && !TREE_SIDE_EFFECTS (len))
4652 arg3_rtx = expand_normal (len);
4653 result = expand_cmpstrn_or_cmpmem
4654 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4655 arg3_rtx, MIN (arg1_align, arg2_align));
4659 tree fndecl = get_callee_fndecl (exp);
4660 if (result)
4662 /* Check to see if the argument was declared attribute nonstring
4663 and if so, issue a warning since at this point it's not known
4664 to be nul-terminated. */
4665 maybe_warn_nonstring_arg (fndecl, exp);
4667 /* Return the value in the proper mode for this function. */
4668 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4669 if (GET_MODE (result) == mode)
4670 return result;
4671 if (target == 0)
4672 return convert_to_mode (mode, result, 0);
4673 convert_move (target, result, 0);
4674 return target;
4677 /* Expand the library call ourselves using a stabilized argument
4678 list to avoid re-evaluating the function's arguments twice. */
4679 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4680 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4681 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4682 return expand_call (fn, target, target == const0_rtx);
4685 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4686 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try to get
4687 the result in TARGET, if convenient. */
4689 static rtx
4690 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4691 ATTRIBUTE_UNUSED machine_mode mode)
4693 if (!validate_arglist (exp,
4694 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4695 return NULL_RTX;
4697 /* Due to the performance benefit, always try the inline expansion first. */
4698 rtx result = NULL_RTX;
4699 result = inline_expand_builtin_string_cmp (exp, target);
4700 if (result)
4701 return result;
4703 /* If c_strlen can determine an expression for one of the string
4704 lengths, and it doesn't have side effects, then emit cmpstrnsi
4705 using length MIN(strlen(string)+1, arg3). */
4706 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4707 if (cmpstrn_icode == CODE_FOR_nothing)
4708 return NULL_RTX;
4710 tree len;
4712 tree arg1 = CALL_EXPR_ARG (exp, 0);
4713 tree arg2 = CALL_EXPR_ARG (exp, 1);
4714 tree arg3 = CALL_EXPR_ARG (exp, 2);
4716 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4717 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4719 tree len1 = c_strlen (arg1, 1);
4720 tree len2 = c_strlen (arg2, 1);
4722 location_t loc = EXPR_LOCATION (exp);
4724 if (len1)
4725 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4726 if (len2)
4727 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4729 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4731 /* If we don't have a constant length for the first, use the length
4732 of the second, if we know it. If neither string is constant length,
4733 use the given length argument. We don't require a constant for
4734 this case; some cost analysis could be done if both are available
4735 but neither is constant. For now, assume they're equally cheap,
4736 unless one has side effects. If both strings have constant lengths,
4737 use the smaller. */
4739 if (!len1 && !len2)
4740 len = len3;
4741 else if (!len1)
4742 len = len2;
4743 else if (!len2)
4744 len = len1;
4745 else if (TREE_SIDE_EFFECTS (len1))
4746 len = len2;
4747 else if (TREE_SIDE_EFFECTS (len2))
4748 len = len1;
4749 else if (TREE_CODE (len1) != INTEGER_CST)
4750 len = len2;
4751 else if (TREE_CODE (len2) != INTEGER_CST)
4752 len = len1;
4753 else if (tree_int_cst_lt (len1, len2))
4754 len = len1;
4755 else
4756 len = len2;
4758 /* If we are not using the given length, we must incorporate it here.
4759 The actual new length parameter will be MIN(len,arg3) in this case. */
4760 if (len != len3)
4762 len = fold_convert_loc (loc, sizetype, len);
4763 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4765 rtx arg1_rtx = get_memory_rtx (arg1, len);
4766 rtx arg2_rtx = get_memory_rtx (arg2, len);
4767 rtx arg3_rtx = expand_normal (len);
4768 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4769 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4770 MIN (arg1_align, arg2_align));
4772 tree fndecl = get_callee_fndecl (exp);
4773 if (result)
4775 /* Check to see if the argument was declared attribute nonstring
4776 and if so, issue a warning since at this point it's not known
4777 to be nul-terminated. */
4778 maybe_warn_nonstring_arg (fndecl, exp);
4780 /* Return the value in the proper mode for this function. */
4781 mode = TYPE_MODE (TREE_TYPE (exp));
4782 if (GET_MODE (result) == mode)
4783 return result;
4784 if (target == 0)
4785 return convert_to_mode (mode, result, 0);
4786 convert_move (target, result, 0);
4787 return target;
4790 /* Expand the library call ourselves using a stabilized argument
4791 list to avoid re-evaluating the function's arguments twice. */
4792 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4793 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4794 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4795 return expand_call (fn, target, target == const0_rtx);
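
In source terms, the length selection above lets a constant string bound the comparison; sketch (user code, not from this file):

    #include <string.h>

    int
    is_abc_prefix (const char *s, size_t n)
    {
      /* strlen ("abc") + 1 == 4 is known at compile time, so the
         cmpstrn/cmpmem expansion need compare only MIN (4, n) bytes:
         bytes past the NUL of "abc" cannot change the result.  */
      return strncmp (s, "abc", n) == 0;
    }
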
4798 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4799 if that's convenient. */
4801 rtx
4802 expand_builtin_saveregs (void)
4804 rtx val;
4805 rtx_insn *seq;
4807 /* Don't do __builtin_saveregs more than once in a function.
4808 Save the result of the first call and reuse it. */
4809 if (saveregs_value != 0)
4810 return saveregs_value;
4812 /* When this function is called, it means that registers must be
4813 saved on entry to this function. So we migrate the call to the
4814 first insn of this function. */
4816 start_sequence ();
4818 /* Do whatever the machine needs done in this case. */
4819 val = targetm.calls.expand_builtin_saveregs ();
4821 seq = get_insns ();
4822 end_sequence ();
4824 saveregs_value = val;
4826 /* Put the insns after the NOTE that starts the function. If this
4827 is inside a start_sequence, make the outer-level insn chain current, so
4828 the code is placed at the start of the function. */
4829 push_topmost_sequence ();
4830 emit_insn_after (seq, entry_of_function ());
4831 pop_topmost_sequence ();
4833 return val;
4836 /* Expand a call to __builtin_next_arg. */
4838 static rtx
4839 expand_builtin_next_arg (void)
4841 /* Checking arguments is already done in fold_builtin_next_arg
4842 that must be called before this function. */
4843 return expand_binop (ptr_mode, add_optab,
4844 crtl->args.internal_arg_pointer,
4845 crtl->args.arg_offset_rtx,
4846 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4849 /* Make it easier for the backends by protecting the valist argument
4850 from multiple evaluations. */
4852 static tree
4853 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4855 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4857 /* The current way of determining the type of valist is completely
4858 bogus. We should have the information on the va builtin instead. */
4859 if (!vatype)
4860 vatype = targetm.fn_abi_va_list (cfun->decl);
4862 if (TREE_CODE (vatype) == ARRAY_TYPE)
4864 if (TREE_SIDE_EFFECTS (valist))
4865 valist = save_expr (valist);
4867 /* For this case, the backends will be expecting a pointer to
4868 vatype, but it's possible we've actually been given an array
4869 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4870 So fix it. */
4871 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4873 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4874 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4877 else
4879 tree pt = build_pointer_type (vatype);
4881 if (! needs_lvalue)
4883 if (! TREE_SIDE_EFFECTS (valist))
4884 return valist;
4886 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4887 TREE_SIDE_EFFECTS (valist) = 1;
4890 if (TREE_SIDE_EFFECTS (valist))
4891 valist = save_expr (valist);
4892 valist = fold_build2_loc (loc, MEM_REF,
4893 vatype, valist, build_int_cst (pt, 0));
4896 return valist;
4899 /* The "standard" definition of va_list is void*. */
4901 tree
4902 std_build_builtin_va_list (void)
4904 return ptr_type_node;
4907 /* The "standard" abi va_list is va_list_type_node. */
4909 tree
4910 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4912 return va_list_type_node;
4915 /* The "standard" type of va_list is va_list_type_node. */
4917 tree
4918 std_canonical_va_list_type (tree type)
4920 tree wtype, htype;
4922 wtype = va_list_type_node;
4923 htype = type;
4925 if (TREE_CODE (wtype) == ARRAY_TYPE)
4927 /* If va_list is an array type, the argument may have decayed
4928 to a pointer type, e.g. by being passed to another function.
4929 In that case, unwrap both types so that we can compare the
4930 underlying records. */
4931 if (TREE_CODE (htype) == ARRAY_TYPE
4932 || POINTER_TYPE_P (htype))
4934 wtype = TREE_TYPE (wtype);
4935 htype = TREE_TYPE (htype);
4938 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4939 return va_list_type_node;
4941 return NULL_TREE;
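
Illustration of the decay the comment above describes, assuming a target such as x86-64 whose va_list is an array of one struct (user code, not from this file; the __va_list_tag name is typical but target-specific):

    #include <stdarg.h>

    /* With typedef struct __va_list_tag va_list[1], this parameter
       decays to struct __va_list_tag *, which is why both the array
       and the pointer type are unwrapped before comparing.  */
    void
    consume_one (va_list ap)
    {
      (void) va_arg (ap, int);
    }
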
4944 /* The "standard" implementation of va_start: just assign `nextarg' to
4945 the variable. */
4947 void
4948 std_expand_builtin_va_start (tree valist, rtx nextarg)
4950 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4951 convert_move (va_r, nextarg, 0);
4954 /* Expand EXP, a call to __builtin_va_start. */
4956 static rtx
4957 expand_builtin_va_start (tree exp)
4959 rtx nextarg;
4960 tree valist;
4961 location_t loc = EXPR_LOCATION (exp);
4963 if (call_expr_nargs (exp) < 2)
4965 error_at (loc, "too few arguments to function %<va_start%>");
4966 return const0_rtx;
4969 if (fold_builtin_next_arg (exp, true))
4970 return const0_rtx;
4972 nextarg = expand_builtin_next_arg ();
4973 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4975 if (targetm.expand_builtin_va_start)
4976 targetm.expand_builtin_va_start (valist, nextarg);
4977 else
4978 std_expand_builtin_va_start (valist, nextarg);
4980 return const0_rtx;
4983 /* Expand EXP, a call to __builtin_va_end. */
4985 static rtx
4986 expand_builtin_va_end (tree exp)
4988 tree valist = CALL_EXPR_ARG (exp, 0);
4990 /* Evaluate for side effects, if needed. I hate macros that don't
4991 do that. */
4992 if (TREE_SIDE_EFFECTS (valist))
4993 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4995 return const0_rtx;
4998 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4999 builtin rather than just as an assignment in stdarg.h because of the
5000 nastiness of array-type va_list types. */
5002 static rtx
5003 expand_builtin_va_copy (tree exp)
5005 tree dst, src, t;
5006 location_t loc = EXPR_LOCATION (exp);
5008 dst = CALL_EXPR_ARG (exp, 0);
5009 src = CALL_EXPR_ARG (exp, 1);
5011 dst = stabilize_va_list_loc (loc, dst, 1);
5012 src = stabilize_va_list_loc (loc, src, 0);
5014 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5016 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5018 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5019 TREE_SIDE_EFFECTS (t) = 1;
5020 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5022 else
5024 rtx dstb, srcb, size;
5026 /* Evaluate to pointers. */
5027 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5028 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5029 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5030 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5032 dstb = convert_memory_address (Pmode, dstb);
5033 srcb = convert_memory_address (Pmode, srcb);
5035 /* "Dereference" to BLKmode memories. */
5036 dstb = gen_rtx_MEM (BLKmode, dstb);
5037 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5038 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5039 srcb = gen_rtx_MEM (BLKmode, srcb);
5040 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5041 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5043 /* Copy. */
5044 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5047 return const0_rtx;
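
For reference, the source-level usage the builtin implements (user code, not from this file); on array-type va_list targets a plain assignment would not copy the underlying structure, hence the block-copy path above:

    #include <stdarg.h>

    int
    sum_twice (int n, ...)
    {
      va_list ap, aq;
      int s = 0;

      va_start (ap, n);
      va_copy (aq, ap);          /* expands through this builtin */
      for (int i = 0; i < n; i++)
        s += va_arg (ap, int);
      for (int i = 0; i < n; i++)
        s += va_arg (aq, int);
      va_end (aq);
      va_end (ap);
      return s;
    }
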
5050 /* Expand a call to one of the builtin functions __builtin_frame_address or
5051 __builtin_return_address. */
5053 static rtx
5054 expand_builtin_frame_address (tree fndecl, tree exp)
5056 /* The argument must be a nonnegative integer constant.
5057 It counts the number of frames to scan up the stack.
5058 The value is either the frame pointer value or the return
5059 address saved in that frame. */
5060 if (call_expr_nargs (exp) == 0)
5061 /* Warning about missing arg was already issued. */
5062 return const0_rtx;
5063 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5065 error ("invalid argument to %qD", fndecl);
5066 return const0_rtx;
5068 else
5070 /* Number of frames to scan up the stack. */
5071 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5073 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5075 /* Some ports cannot access arbitrary stack frames. */
5076 if (tem == NULL)
5078 warning (0, "unsupported argument to %qD", fndecl);
5079 return const0_rtx;
5082 if (count)
5084 /* Warn since no effort is made to ensure that any frame
5085 beyond the current one exists or can be safely reached. */
5086 warning (OPT_Wframe_address, "calling %qD with "
5087 "a nonzero argument is unsafe", fndecl);
5090 /* For __builtin_frame_address, return what we've got. */
5091 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5092 return tem;
5094 if (!REG_P (tem)
5095 && ! CONSTANT_P (tem))
5096 tem = copy_addr_to_reg (tem);
5097 return tem;
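
User-level view of the checks above (illustration, not from this file):

    void *
    caller_return_address (void)
    {
      /* The argument must be a nonnegative integer constant; 0 means
         the current frame.  A nonzero constant still expands but
         draws -Wframe-address, since frames beyond the current one
         may not be safely reachable.  */
      return __builtin_return_address (0);
    }
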
5101 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5102 failed and the caller should emit a normal call. */
5104 static rtx
5105 expand_builtin_alloca (tree exp)
5107 rtx op0;
5108 rtx result;
5109 unsigned int align;
5110 tree fndecl = get_callee_fndecl (exp);
5111 HOST_WIDE_INT max_size;
5112 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5113 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5114 bool valid_arglist
5115 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5116 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5117 VOID_TYPE)
5118 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5119 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5120 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5122 if (!valid_arglist)
5123 return NULL_RTX;
5125 if ((alloca_for_var
5126 && warn_vla_limit >= HOST_WIDE_INT_MAX
5127 && warn_alloc_size_limit < warn_vla_limit)
5128 || (!alloca_for_var
5129 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5130 && warn_alloc_size_limit < warn_alloca_limit
5133 /* -Walloca-larger-than and -Wvla-larger-than settings of
5134 less than HOST_WIDE_INT_MAX override the more general
5135 -Walloc-size-larger-than so unless either of the former
5136 options is smaller than the last one (which would imply
5137 that the call was already checked), check the alloca
5138 arguments for overflow. */
5139 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5140 int idx[] = { 0, -1 };
5141 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5144 /* Compute the argument. */
5145 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5147 /* Compute the alignment. */
5148 align = (fcode == BUILT_IN_ALLOCA
5149 ? BIGGEST_ALIGNMENT
5150 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5152 /* Compute the maximum size. */
5153 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5154 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5155 : -1);
5157 /* Allocate the desired space. If the allocation stems from the declaration
5158 of a variable-sized object, it cannot accumulate. */
5159 result
5160 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5161 result = convert_memory_address (ptr_mode, result);
5163 return result;
5166 /* Emit the __asan_allocas_unpoison call in EXP. Add to its second
5167 argument virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5168 STACK_DYNAMIC_OFFSET value. See the motivation for this in the comment
5169 to the handle_builtin_stack_restore function. */
5171 static rtx
5172 expand_asan_emit_allocas_unpoison (tree exp)
5174 tree arg0 = CALL_EXPR_ARG (exp, 0);
5175 tree arg1 = CALL_EXPR_ARG (exp, 1);
5176 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5177 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5178 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5179 stack_pointer_rtx, NULL_RTX, 0,
5180 OPTAB_LIB_WIDEN);
5181 off = convert_modes (ptr_mode, Pmode, off, 0);
5182 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5183 OPTAB_LIB_WIDEN);
5184 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5185 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5186 top, ptr_mode, bot, ptr_mode);
5187 return ret;
5190 /* Expand a call to bswap builtin in EXP.
5191 Return NULL_RTX if a normal call should be emitted rather than expanding the
5192 function in-line. If convenient, the result should be placed in TARGET.
5193 SUBTARGET may be used as the target for computing one of EXP's operands. */
5195 static rtx
5196 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5197 rtx subtarget)
5199 tree arg;
5200 rtx op0;
5202 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5203 return NULL_RTX;
5205 arg = CALL_EXPR_ARG (exp, 0);
5206 op0 = expand_expr (arg,
5207 subtarget && GET_MODE (subtarget) == target_mode
5208 ? subtarget : NULL_RTX,
5209 target_mode, EXPAND_NORMAL);
5210 if (GET_MODE (op0) != target_mode)
5211 op0 = convert_to_mode (target_mode, op0, 1);
5213 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5215 gcc_assert (target);
5217 return convert_to_mode (target_mode, target, 1);
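
The value semantics of the expanded builtins, as a self-contained check (user code, not from this file; whether this becomes a single bswap instruction depends on the target's bswap_optab support):

    #include <assert.h>
    #include <stdint.h>

    int
    main (void)
    {
      assert (__builtin_bswap32 (UINT32_C (0x11223344))
              == UINT32_C (0x44332211));
      assert (__builtin_bswap16 (0x1122) == 0x2211);
      return 0;
    }
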
5220 /* Expand a call to a unary builtin in EXP.
5221 Return NULL_RTX if a normal call should be emitted rather than expanding the
5222 function in-line. If convenient, the result should be placed in TARGET.
5223 SUBTARGET may be used as the target for computing one of EXP's operands. */
5225 static rtx
5226 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5227 rtx subtarget, optab op_optab)
5229 rtx op0;
5231 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5232 return NULL_RTX;
5234 /* Compute the argument. */
5235 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5236 (subtarget
5237 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5238 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5239 VOIDmode, EXPAND_NORMAL);
5240 /* Compute op, into TARGET if possible.
5241 Set TARGET to wherever the result comes back. */
5242 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5243 op_optab, op0, target, op_optab != clrsb_optab);
5244 gcc_assert (target);
5246 return convert_to_mode (target_mode, target, 0);
5249 /* Expand a call to __builtin_expect.  We just return our argument
5250 as the builtin_expect semantics should have already been applied by
5251 the tree branch prediction pass. */
5253 static rtx
5254 expand_builtin_expect (tree exp, rtx target)
5256 tree arg;
5258 if (call_expr_nargs (exp) < 2)
5259 return const0_rtx;
5260 arg = CALL_EXPR_ARG (exp, 0);
5262 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5263 /* When guessing was done, the hints should already have been stripped away. */
5264 gcc_assert (!flag_guess_branch_prob
5265 || optimize == 0 || seen_error ());
5266 return target;
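/* Usage sketch (editorial): in source such as

     if (__builtin_expect (ptr != 0, 1))   // "likely" hint
       use (ptr);

   the hint has already been consumed by the tree branch prediction pass,
   which is why the expander above only evaluates and returns ARG.  */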
5269 /* Expand a call to __builtin_expect_with_probability.  We just return our
5270 argument as the builtin_expect semantics should have already been applied
5271 by the tree branch prediction pass. */
5273 static rtx
5274 expand_builtin_expect_with_probability (tree exp, rtx target)
5276 tree arg;
5278 if (call_expr_nargs (exp) < 3)
5279 return const0_rtx;
5280 arg = CALL_EXPR_ARG (exp, 0);
5282 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5283 /* When guessing was done, the hints should already have been stripped away. */
5284 gcc_assert (!flag_guess_branch_prob
5285 || optimize == 0 || seen_error ());
5286 return target;
5290 /* Expand a call to __builtin_assume_aligned.  We just return our first
5291 argument as the builtin_assume_aligned semantics should have already been
5292 applied by CCP. */
5294 static rtx
5295 expand_builtin_assume_aligned (tree exp, rtx target)
5297 if (call_expr_nargs (exp) < 2)
5298 return const0_rtx;
5299 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5300 EXPAND_NORMAL);
5301 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5302 && (call_expr_nargs (exp) < 3
5303 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5304 return target;
5307 void
5308 expand_builtin_trap (void)
5310 if (targetm.have_trap ())
5312 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5313 /* For trap insns when not accumulating outgoing args force
5314 REG_ARGS_SIZE note to prevent crossjumping of calls with
5315 different args sizes. */
5316 if (!ACCUMULATE_OUTGOING_ARGS)
5317 add_args_size_note (insn, stack_pointer_delta);
5319 else
5321 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5322 tree call_expr = build_call_expr (fn, 0);
5323 expand_call (call_expr, NULL_RTX, false);
5326 emit_barrier ();
5329 /* Expand a call to __builtin_unreachable. We do nothing except emit
5330 a barrier saying that control flow will not pass here.
5332 It is the responsibility of the program being compiled to ensure
5333 that control flow never reaches __builtin_unreachable. */
5334 static void
5335 expand_builtin_unreachable (void)
5337 emit_barrier ();
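/* Usage sketch (editorial): the barrier above is what allows

     int pick (int k)
     {
       switch (k & 1)
	 {
	 case 0: return 10;
	 case 1: return 11;
	 }
       __builtin_unreachable ();   // the end is provably not reached
     }

   to drop the impossible path entirely; reaching the builtin at run time
   is undefined behavior.  */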
5340 /* Expand EXP, a call to fabs, fabsf or fabsl.
5341 Return NULL_RTX if a normal call should be emitted rather than expanding
5342 the function inline. If convenient, the result should be placed
5343 in TARGET. SUBTARGET may be used as the target for computing
5344 the operand. */
5346 static rtx
5347 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5349 machine_mode mode;
5350 tree arg;
5351 rtx op0;
5353 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5354 return NULL_RTX;
5356 arg = CALL_EXPR_ARG (exp, 0);
5357 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5358 mode = TYPE_MODE (TREE_TYPE (arg));
5359 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5360 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5363 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5364 Return NULL_RTX if a normal call should be emitted rather than expanding the
5365 function inline. If convenient, the result should be placed in TARGET.
5366 SUBTARGET may be used as the target for computing the operand. */
5368 static rtx
5369 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5371 rtx op0, op1;
5372 tree arg;
5374 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5375 return NULL_RTX;
5377 arg = CALL_EXPR_ARG (exp, 0);
5378 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5380 arg = CALL_EXPR_ARG (exp, 1);
5381 op1 = expand_normal (arg);
5383 return expand_copysign (op0, op1, target);
5386 /* Expand a call to __builtin___clear_cache. */
5388 static rtx
5389 expand_builtin___clear_cache (tree exp)
5391 if (!targetm.code_for_clear_cache)
5393 #ifdef CLEAR_INSN_CACHE
5394 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5395 does something. Just do the default expansion to a call to
5396 __clear_cache(). */
5397 return NULL_RTX;
5398 #else
5399 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5400 does nothing. There is no need to call it. Do nothing. */
5401 return const0_rtx;
5402 #endif /* CLEAR_INSN_CACHE */
5405 /* We have a "clear_cache" insn, and it will handle everything. */
5406 tree begin, end;
5407 rtx begin_rtx, end_rtx;
5409 /* We must not expand to a library call. If we did, any
5410 fallback library function in libgcc that might contain a call to
5411 __builtin___clear_cache() would recurse infinitely. */
5412 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5414 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5415 return const0_rtx;
5418 if (targetm.have_clear_cache ())
5420 struct expand_operand ops[2];
5422 begin = CALL_EXPR_ARG (exp, 0);
5423 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5425 end = CALL_EXPR_ARG (exp, 1);
5426 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5428 create_address_operand (&ops[0], begin_rtx);
5429 create_address_operand (&ops[1], end_rtx);
5430 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5431 return const0_rtx;
5433 return const0_rtx;
5436 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5438 static rtx
5439 round_trampoline_addr (rtx tramp)
5441 rtx temp, addend, mask;
5443 /* If we don't need too much alignment, we'll have been guaranteed
5444 proper alignment by get_trampoline_type. */
5445 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5446 return tramp;
5448 /* Round address up to desired boundary. */
5449 temp = gen_reg_rtx (Pmode);
5450 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5451 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5453 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5454 temp, 0, OPTAB_LIB_WIDEN);
5455 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5456 temp, 0, OPTAB_LIB_WIDEN);
5458 return tramp;
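/* Worked example (editorial): for a TRAMPOLINE_ALIGNMENT of 64 bits the
   PLUS/AND pair above is the usual round-up idiom, in plain C:

     uintptr_t round_up_8 (uintptr_t p)
     {
       return (p + 7) & (uintptr_t) -8;   // 8 == 64 / BITS_PER_UNIT
     }
*/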
5461 static rtx
5462 expand_builtin_init_trampoline (tree exp, bool onstack)
5464 tree t_tramp, t_func, t_chain;
5465 rtx m_tramp, r_tramp, r_chain, tmp;
5467 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5468 POINTER_TYPE, VOID_TYPE))
5469 return NULL_RTX;
5471 t_tramp = CALL_EXPR_ARG (exp, 0);
5472 t_func = CALL_EXPR_ARG (exp, 1);
5473 t_chain = CALL_EXPR_ARG (exp, 2);
5475 r_tramp = expand_normal (t_tramp);
5476 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5477 MEM_NOTRAP_P (m_tramp) = 1;
5479 /* If ONSTACK, the TRAMP argument should be the address of a field
5480 within the local function's FRAME decl. Either way, let's see if
5481 we can fill in the MEM_ATTRs for this memory. */
5482 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5483 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5485 /* The creator of a heap trampoline is responsible for making sure the
5486 address is aligned to at least STACK_BOUNDARY. Normally malloc
5487 will ensure this anyhow. */
5488 tmp = round_trampoline_addr (r_tramp);
5489 if (tmp != r_tramp)
5491 m_tramp = change_address (m_tramp, BLKmode, tmp);
5492 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5493 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5496 /* The FUNC argument should be the address of the nested function.
5497 Extract the actual function decl to pass to the hook. */
5498 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5499 t_func = TREE_OPERAND (t_func, 0);
5500 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5502 r_chain = expand_normal (t_chain);
5504 /* Generate insns to initialize the trampoline. */
5505 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5507 if (onstack)
5509 trampolines_created = 1;
5511 if (targetm.calls.custom_function_descriptors != 0)
5512 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5513 "trampoline generated for nested function %qD", t_func);
5516 return const0_rtx;
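/* Background sketch (editorial): on-stack trampolines arise for GNU C
   nested functions whose address escapes, e.g.

     int apply (int (*f) (int), int x);

     int outer (int k)
     {
       int inner (int x) { return x + k; }  // reads OUTER's frame
       return apply (inner, 1);             // address taken -> trampoline
     }

   The trampoline materializes the static chain and jumps to INNER, which
   is why -Wtrampolines can warn here on targets that could use custom
   function descriptors instead.  */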
5519 static rtx
5520 expand_builtin_adjust_trampoline (tree exp)
5522 rtx tramp;
5524 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5525 return NULL_RTX;
5527 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5528 tramp = round_trampoline_addr (tramp);
5529 if (targetm.calls.trampoline_adjust_address)
5530 tramp = targetm.calls.trampoline_adjust_address (tramp);
5532 return tramp;
5535 /* Expand a call to the builtin descriptor initialization routine.
5536 A descriptor is made up of a couple of pointers to the static
5537 chain and the code entry in this order. */
5539 static rtx
5540 expand_builtin_init_descriptor (tree exp)
5542 tree t_descr, t_func, t_chain;
5543 rtx m_descr, r_descr, r_func, r_chain;
5545 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5546 VOID_TYPE))
5547 return NULL_RTX;
5549 t_descr = CALL_EXPR_ARG (exp, 0);
5550 t_func = CALL_EXPR_ARG (exp, 1);
5551 t_chain = CALL_EXPR_ARG (exp, 2);
5553 r_descr = expand_normal (t_descr);
5554 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5555 MEM_NOTRAP_P (m_descr) = 1;
5557 r_func = expand_normal (t_func);
5558 r_chain = expand_normal (t_chain);
5560 /* Generate insns to initialize the descriptor. */
5561 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5562 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5563 POINTER_SIZE / BITS_PER_UNIT), r_func);
5565 return const0_rtx;
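/* Layout sketch (editorial): the two stores above fill the equivalent of

     struct descriptor
     {
       void *chain;   // static chain, at offset 0
       void *entry;   // code address, at POINTER_SIZE / BITS_PER_UNIT
     };

   which is the order stated in the comment before the function.  */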
5568 /* Expand a call to the builtin descriptor adjustment routine. */
5570 static rtx
5571 expand_builtin_adjust_descriptor (tree exp)
5573 rtx tramp;
5575 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5576 return NULL_RTX;
5578 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5580 /* Unalign the descriptor to allow runtime identification. */
5581 tramp = plus_constant (ptr_mode, tramp,
5582 targetm.calls.custom_function_descriptors);
5584 return force_operand (tramp, NULL_RTX);
5587 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5588 function. The function first checks whether the back end provides
5589 an insn to implement signbit for the respective mode. If not, it
5590 checks whether the floating point format of the value is such that
5591 the sign bit can be extracted; failing that, signbit is expanded as "ARG < 0.0".
5592 EXP is the expression that is a call to the builtin function; if
5593 convenient, the result should be placed in TARGET. */
5594 static rtx
5595 expand_builtin_signbit (tree exp, rtx target)
5597 const struct real_format *fmt;
5598 scalar_float_mode fmode;
5599 scalar_int_mode rmode, imode;
5600 tree arg;
5601 int word, bitpos;
5602 enum insn_code icode;
5603 rtx temp;
5604 location_t loc = EXPR_LOCATION (exp);
5606 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5607 return NULL_RTX;
5609 arg = CALL_EXPR_ARG (exp, 0);
5610 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5611 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5612 fmt = REAL_MODE_FORMAT (fmode);
5614 arg = builtin_save_expr (arg);
5616 /* Expand the argument yielding a RTX expression. */
5617 temp = expand_normal (arg);
5619 /* Check if the back end provides an insn that handles signbit for the
5620 argument's mode. */
5621 icode = optab_handler (signbit_optab, fmode);
5622 if (icode != CODE_FOR_nothing)
5624 rtx_insn *last = get_last_insn ();
5625 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5626 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5627 return target;
5628 delete_insns_since (last);
5631 /* For floating point formats without a sign bit, implement signbit
5632 as "ARG < 0.0". */
5633 bitpos = fmt->signbit_ro;
5634 if (bitpos < 0)
5636 /* But we can't do this if the format supports signed zero. */
5637 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5639 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5640 build_real (TREE_TYPE (arg), dconst0));
5641 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5644 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5646 imode = int_mode_for_mode (fmode).require ();
5647 temp = gen_lowpart (imode, temp);
5649 else
5651 imode = word_mode;
5652 /* Handle targets with different FP word orders. */
5653 if (FLOAT_WORDS_BIG_ENDIAN)
5654 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5655 else
5656 word = bitpos / BITS_PER_WORD;
5657 temp = operand_subword_force (temp, word, fmode);
5658 bitpos = bitpos % BITS_PER_WORD;
5661 /* Force the intermediate word_mode (or narrower) result into a
5662 register. This avoids attempting to create paradoxical SUBREGs
5663 of floating point modes below. */
5664 temp = force_reg (imode, temp);
5666 /* If the bitpos is within the "result mode" lowpart, the operation
5667 can be implemented with a single bitwise AND.  Otherwise, we need
5668 a right shift and an AND. */
5670 if (bitpos < GET_MODE_BITSIZE (rmode))
5672 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5674 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5675 temp = gen_lowpart (rmode, temp);
5676 temp = expand_binop (rmode, and_optab, temp,
5677 immed_wide_int_const (mask, rmode),
5678 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5680 else
5682 /* Perform a logical right shift to place the signbit in the least
5683 significant bit, then truncate the result to the desired mode
5684 and mask just this bit. */
5685 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5686 temp = gen_lowpart (rmode, temp);
5687 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5688 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5691 return temp;
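/* Worked example (editorial): for IEEE double with a 32-bit int result,
   signbit_ro is 63, outside the result mode, so the shift-and-AND branch
   applies and the expansion behaves like

     int signbit_d (double x)   // sketch, assuming 64-bit words
     {
       unsigned long long bits;
       __builtin_memcpy (&bits, &x, sizeof bits);
       return (int) (bits >> 63) & 1;
     }

   For float, signbit_ro is 31 and fits in the int lowpart, so a single
   AND with bit 31 suffices (the result is then merely nonzero).  */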
5694 /* Expand fork or exec calls. TARGET is the desired target of the
5695 call.  EXP is the call.  FN is the
5696 declaration of the actual function.  IGNORE is nonzero if the
5697 value is to be ignored. */
5699 static rtx
5700 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5702 tree id, decl;
5703 tree call;
5705 /* If we are not profiling, just call the function. */
5706 if (!profile_arc_flag)
5707 return NULL_RTX;
5709 /* Otherwise call the wrapper. This should be equivalent for the rest of
5710 the compiler, so the code does not diverge, and the wrapper may run the
5711 code necessary for keeping the profiling sane. */
5713 switch (DECL_FUNCTION_CODE (fn))
5715 case BUILT_IN_FORK:
5716 id = get_identifier ("__gcov_fork");
5717 break;
5719 case BUILT_IN_EXECL:
5720 id = get_identifier ("__gcov_execl");
5721 break;
5723 case BUILT_IN_EXECV:
5724 id = get_identifier ("__gcov_execv");
5725 break;
5727 case BUILT_IN_EXECLP:
5728 id = get_identifier ("__gcov_execlp");
5729 break;
5731 case BUILT_IN_EXECLE:
5732 id = get_identifier ("__gcov_execle");
5733 break;
5735 case BUILT_IN_EXECVP:
5736 id = get_identifier ("__gcov_execvp");
5737 break;
5739 case BUILT_IN_EXECVE:
5740 id = get_identifier ("__gcov_execve");
5741 break;
5743 default:
5744 gcc_unreachable ();
5747 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5748 FUNCTION_DECL, id, TREE_TYPE (fn));
5749 DECL_EXTERNAL (decl) = 1;
5750 TREE_PUBLIC (decl) = 1;
5751 DECL_ARTIFICIAL (decl) = 1;
5752 TREE_NOTHROW (decl) = 1;
5753 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5754 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5755 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5756 return expand_call (call, target, ignore);
5761 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5762 the pointer in these functions is void*, the tree optimizers may remove
5763 casts. The mode computed in expand_builtin isn't reliable either, due
5764 to __sync_bool_compare_and_swap.
5766 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5767 group of builtins. This gives us log2 of the mode size. */
5769 static inline machine_mode
5770 get_builtin_sync_mode (int fcode_diff)
5772 /* The size is not negotiable, so ask not to get BLKmode in return
5773 if the target indicates that a smaller size would be better. */
5774 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
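/* Example (editorial): FCODE_DIFF is log2 of the access size in bytes, so

     get_builtin_sync_mode (0);   // _1 builtins -> QImode
     get_builtin_sync_mode (2);   // _4 builtins -> SImode
     get_builtin_sync_mode (4);   // _16 builtins -> TImode

   e.g. BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2.  */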
5777 /* Expand the memory expression LOC and return the appropriate memory operand
5778 for the builtin_sync operations. */
5780 static rtx
5781 get_builtin_sync_mem (tree loc, machine_mode mode)
5783 rtx addr, mem;
5785 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5786 addr = convert_memory_address (Pmode, addr);
5788 /* Note that we explicitly do not want any alias information for this
5789 memory, so that we kill all other live memories. Otherwise we don't
5790 satisfy the full barrier semantics of the intrinsic. */
5791 mem = validize_mem (gen_rtx_MEM (mode, addr));
5793 /* The alignment needs to be at least that required by the mode. */
5794 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5795 get_pointer_alignment (loc)));
5796 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5797 MEM_VOLATILE_P (mem) = 1;
5799 return mem;
5802 /* Make sure an argument is in the right mode.
5803 EXP is the tree argument.
5804 MODE is the mode it should be in. */
5806 static rtx
5807 expand_expr_force_mode (tree exp, machine_mode mode)
5809 rtx val;
5810 machine_mode old_mode;
5812 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5813 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5814 of CONST_INTs, where we know the old_mode only from the call argument. */
5816 old_mode = GET_MODE (val);
5817 if (old_mode == VOIDmode)
5818 old_mode = TYPE_MODE (TREE_TYPE (exp));
5819 val = convert_modes (mode, old_mode, val, 1);
5820 return val;
5824 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5825 EXP is the CALL_EXPR. CODE is the rtx code
5826 that corresponds to the arithmetic or logical operation from the name;
5827 an exception here is that NOT actually means NAND. TARGET is an optional
5828 place for us to store the results; AFTER is true if this is the
5829 xxx_and_fetch form, i.e. the result of the operation is returned. */
5831 static rtx
5832 expand_builtin_sync_operation (machine_mode mode, tree exp,
5833 enum rtx_code code, bool after,
5834 rtx target)
5836 rtx val, mem;
5837 location_t loc = EXPR_LOCATION (exp);
5839 if (code == NOT && warn_sync_nand)
5841 tree fndecl = get_callee_fndecl (exp);
5842 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5844 static bool warned_f_a_n, warned_n_a_f;
5846 switch (fcode)
5848 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5849 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5850 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5851 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5852 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5853 if (warned_f_a_n)
5854 break;
5856 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5857 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5858 warned_f_a_n = true;
5859 break;
5861 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5862 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5863 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5864 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5865 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5866 if (warned_n_a_f)
5867 break;
5869 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5870 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5871 warned_n_a_f = true;
5872 break;
5874 default:
5875 gcc_unreachable ();
5879 /* Expand the operands. */
5880 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5881 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5883 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5884 after);
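/* Semantics sketch (editorial): with CODE == PLUS and AFTER == false this
   expands __sync_fetch_and_add, i.e. atomically

     old = *mem; *mem = old + val; return old;   // AFTER: return old + val

   and, per the warning above, NOT means NAND: *mem = ~(*mem & val).  */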
5887 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5888 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5889 true if this is the boolean form. TARGET is a place for us to store the
5890 results; this is NOT optional if IS_BOOL is true. */
5892 static rtx
5893 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5894 bool is_bool, rtx target)
5896 rtx old_val, new_val, mem;
5897 rtx *pbool, *poval;
5899 /* Expand the operands. */
5900 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5901 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5902 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5904 pbool = poval = NULL;
5905 if (target != const0_rtx)
5907 if (is_bool)
5908 pbool = &target;
5909 else
5910 poval = &target;
5912 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5913 false, MEMMODEL_SYNC_SEQ_CST,
5914 MEMMODEL_SYNC_SEQ_CST))
5915 return NULL_RTX;
5917 return target;
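/* Usage sketch (editorial): the two source forms map onto PBOOL/POVAL:

     ok  = __sync_bool_compare_and_swap (p, oldv, newv);  // IS_BOOL, PBOOL
     old = __sync_val_compare_and_swap (p, oldv, newv);   // !IS_BOOL, POVAL

   so expand_atomic_compare_and_swap only materializes the result that was
   actually requested.  */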
5920 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5921 general form is actually an atomic exchange, and some targets only
5922 support a reduced form with the second argument being a constant 1.
5923 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5924 the results. */
5926 static rtx
5927 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5928 rtx target)
5930 rtx val, mem;
5932 /* Expand the operands. */
5933 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5934 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5936 return expand_sync_lock_test_and_set (target, mem, val);
5939 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5941 static void
5942 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5944 rtx mem;
5946 /* Expand the operands. */
5947 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5949 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5952 /* Given an integer representing an ``enum memmodel'', verify its
5953 correctness and return the memory model enum. */
5955 static enum memmodel
5956 get_memmodel (tree exp)
5958 rtx op;
5959 unsigned HOST_WIDE_INT val;
5960 source_location loc
5961 = expansion_point_location_if_in_system_header (input_location);
5963 /* If the parameter is not a constant, it's a run time value so we'll just
5964 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5965 if (TREE_CODE (exp) != INTEGER_CST)
5966 return MEMMODEL_SEQ_CST;
5968 op = expand_normal (exp);
5970 val = INTVAL (op);
5971 if (targetm.memmodel_check)
5972 val = targetm.memmodel_check (val);
5973 else if (val & ~MEMMODEL_MASK)
5975 warning_at (loc, OPT_Winvalid_memory_model,
5976 "unknown architecture specifier in memory model to builtin");
5977 return MEMMODEL_SEQ_CST;
5980 /* Should never see a user-explicit SYNC memory model, so >= LAST works. */
5981 if (memmodel_base (val) >= MEMMODEL_LAST)
5983 warning_at (loc, OPT_Winvalid_memory_model,
5984 "invalid memory model argument to builtin");
5985 return MEMMODEL_SEQ_CST;
5988 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5989 be conservative and promote consume to acquire. */
5990 if (val == MEMMODEL_CONSUME)
5991 val = MEMMODEL_ACQUIRE;
5993 return (enum memmodel) val;
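/* Example (editorial): __atomic_load_n (p, __ATOMIC_CONSUME) comes back as
   MEMMODEL_ACQUIRE because of the PR 59448 promotion, and a non-constant
   model such as

     int load (int *p, int m) { return __atomic_load_n (p, m); }

   is treated as MEMMODEL_SEQ_CST rather than checked at run time.  */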
5996 /* Expand the __atomic_exchange intrinsic:
5997 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5998 EXP is the CALL_EXPR.
5999 TARGET is an optional place for us to store the results. */
6001 static rtx
6002 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6004 rtx val, mem;
6005 enum memmodel model;
6007 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6009 if (!flag_inline_atomics)
6010 return NULL_RTX;
6012 /* Expand the operands. */
6013 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6014 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6016 return expand_atomic_exchange (target, mem, val, model);
6019 /* Expand the __atomic_compare_exchange intrinsic:
6020 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6021 TYPE desired, BOOL weak,
6022 enum memmodel success,
6023 enum memmodel failure)
6024 EXP is the CALL_EXPR.
6025 TARGET is an optional place for us to store the results. */
6027 static rtx
6028 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6029 rtx target)
6031 rtx expect, desired, mem, oldval;
6032 rtx_code_label *label;
6033 enum memmodel success, failure;
6034 tree weak;
6035 bool is_weak;
6036 source_location loc
6037 = expansion_point_location_if_in_system_header (input_location);
6039 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6040 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6042 if (failure > success)
6044 warning_at (loc, OPT_Winvalid_memory_model,
6045 "failure memory model cannot be stronger than success "
6046 "memory model for %<__atomic_compare_exchange%>");
6047 success = MEMMODEL_SEQ_CST;
6050 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6052 warning_at (loc, OPT_Winvalid_memory_model,
6053 "invalid failure memory model for "
6054 "%<__atomic_compare_exchange%>");
6055 failure = MEMMODEL_SEQ_CST;
6056 success = MEMMODEL_SEQ_CST;
6060 if (!flag_inline_atomics)
6061 return NULL_RTX;
6063 /* Expand the operands. */
6064 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6066 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6067 expect = convert_memory_address (Pmode, expect);
6068 expect = gen_rtx_MEM (mode, expect);
6069 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6071 weak = CALL_EXPR_ARG (exp, 3);
6072 is_weak = false;
6073 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6074 is_weak = true;
6076 if (target == const0_rtx)
6077 target = NULL;
6079 /* Lest the rtl backend create a race condition with an improper store
6080 to memory, always create a new pseudo for OLDVAL. */
6081 oldval = NULL;
6083 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6084 is_weak, success, failure))
6085 return NULL_RTX;
6087 /* Conditionally store back to EXPECT, lest we create a race condition
6088 with an improper store to memory. */
6089 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6090 the normal case where EXPECT is totally private, i.e. a register. At
6091 which point the store can be unconditional. */
6092 label = gen_label_rtx ();
6093 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6094 GET_MODE (target), 1, label);
6095 emit_move_insn (expect, oldval);
6096 emit_label (label);
6098 return target;
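/* Behavior sketch (editorial): the conditional store back gives the
   documented semantics of

     bool cas (int *p, int *expected, int desired)
     {
       return __atomic_compare_exchange_n (p, expected, desired, 0,
					   __ATOMIC_SEQ_CST,
					   __ATOMIC_SEQ_CST);
       // on failure, *expected now holds the value seen in *p
     }

   hence the fresh pseudo for OLDVAL instead of writing the user's memory
   unconditionally.  */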
6101 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6102 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6103 call. The weak parameter must be dropped to match the expected parameter
6104 list, and the expected argument changed from a value to a pointer to a
6105 memory slot. */
6107 static void
6108 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6110 unsigned int z;
6111 vec<tree, va_gc> *vec;
6113 vec_alloc (vec, 5);
6114 vec->quick_push (gimple_call_arg (call, 0));
6115 tree expected = gimple_call_arg (call, 1);
6116 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6117 TREE_TYPE (expected));
6118 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6119 if (expd != x)
6120 emit_move_insn (x, expd);
6121 tree v = make_tree (TREE_TYPE (expected), x);
6122 vec->quick_push (build1 (ADDR_EXPR,
6123 build_pointer_type (TREE_TYPE (expected)), v));
6124 vec->quick_push (gimple_call_arg (call, 2));
6125 /* Skip the boolean weak parameter. */
6126 for (z = 4; z < 6; z++)
6127 vec->quick_push (gimple_call_arg (call, z));
6128 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6129 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6130 gcc_assert (bytes_log2 < 5);
6131 built_in_function fncode
6132 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6133 + bytes_log2);
6134 tree fndecl = builtin_decl_explicit (fncode);
6135 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6136 fndecl);
6137 tree exp = build_call_vec (boolean_type_node, fn, vec);
6138 tree lhs = gimple_call_lhs (call);
6139 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6140 if (lhs)
6142 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6143 if (GET_MODE (boolret) != mode)
6144 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6145 x = force_reg (mode, x);
6146 write_complex_part (target, boolret, true);
6147 write_complex_part (target, x, false);
6151 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6153 void
6154 expand_ifn_atomic_compare_exchange (gcall *call)
6156 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6157 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6158 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6159 rtx expect, desired, mem, oldval, boolret;
6160 enum memmodel success, failure;
6161 tree lhs;
6162 bool is_weak;
6163 source_location loc
6164 = expansion_point_location_if_in_system_header (gimple_location (call));
6166 success = get_memmodel (gimple_call_arg (call, 4));
6167 failure = get_memmodel (gimple_call_arg (call, 5));
6169 if (failure > success)
6171 warning_at (loc, OPT_Winvalid_memory_model,
6172 "failure memory model cannot be stronger than success "
6173 "memory model for %<__atomic_compare_exchange%>");
6174 success = MEMMODEL_SEQ_CST;
6177 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6179 warning_at (loc, OPT_Winvalid_memory_model,
6180 "invalid failure memory model for "
6181 "%<__atomic_compare_exchange%>");
6182 failure = MEMMODEL_SEQ_CST;
6183 success = MEMMODEL_SEQ_CST;
6186 if (!flag_inline_atomics)
6188 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6189 return;
6192 /* Expand the operands. */
6193 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6195 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6196 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6198 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6200 boolret = NULL;
6201 oldval = NULL;
6203 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6204 is_weak, success, failure))
6206 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6207 return;
6210 lhs = gimple_call_lhs (call);
6211 if (lhs)
6213 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6214 if (GET_MODE (boolret) != mode)
6215 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6216 write_complex_part (target, boolret, true);
6217 write_complex_part (target, oldval, false);
6221 /* Expand the __atomic_load intrinsic:
6222 TYPE __atomic_load (TYPE *object, enum memmodel)
6223 EXP is the CALL_EXPR.
6224 TARGET is an optional place for us to store the results. */
6226 static rtx
6227 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6229 rtx mem;
6230 enum memmodel model;
6232 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6233 if (is_mm_release (model) || is_mm_acq_rel (model))
6235 source_location loc
6236 = expansion_point_location_if_in_system_header (input_location);
6237 warning_at (loc, OPT_Winvalid_memory_model,
6238 "invalid memory model for %<__atomic_load%>");
6239 model = MEMMODEL_SEQ_CST;
6242 if (!flag_inline_atomics)
6243 return NULL_RTX;
6245 /* Expand the operand. */
6246 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6248 return expand_atomic_load (target, mem, model);
6252 /* Expand the __atomic_store intrinsic:
6253 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6254 EXP is the CALL_EXPR.
6255 TARGET is an optional place for us to store the results. */
6257 static rtx
6258 expand_builtin_atomic_store (machine_mode mode, tree exp)
6260 rtx mem, val;
6261 enum memmodel model;
6263 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6264 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6265 || is_mm_release (model)))
6267 source_location loc
6268 = expansion_point_location_if_in_system_header (input_location);
6269 warning_at (loc, OPT_Winvalid_memory_model,
6270 "invalid memory model for %<__atomic_store%>");
6271 model = MEMMODEL_SEQ_CST;
6274 if (!flag_inline_atomics)
6275 return NULL_RTX;
6277 /* Expand the operands. */
6278 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6279 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6281 return expand_atomic_store (mem, val, model, false);
6284 /* Expand the __atomic_fetch_XXX intrinsic:
6285 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6286 EXP is the CALL_EXPR.
6287 TARGET is an optional place for us to store the results.
6288 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
6289 FETCH_AFTER is true if returning the result of the operation.
6290 FETCH_AFTER is false if returning the value before the operation.
6291 IGNORE is true if the result is not used.
6292 EXT_CALL is the correct builtin for an external call if this cannot be
6293 resolved to an instruction sequence. */
6295 static rtx
6296 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6297 enum rtx_code code, bool fetch_after,
6298 bool ignore, enum built_in_function ext_call)
6300 rtx val, mem, ret;
6301 enum memmodel model;
6302 tree fndecl;
6303 tree addr;
6305 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6307 /* Expand the operands. */
6308 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6309 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6311 /* Only try generating instructions if inlining is turned on. */
6312 if (flag_inline_atomics)
6314 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6315 if (ret)
6316 return ret;
6319 /* Return if no alternate routine is needed for the library call. */
6320 if (ext_call == BUILT_IN_NONE)
6321 return NULL_RTX;
6323 /* Change the call to the specified function. */
6324 fndecl = get_callee_fndecl (exp);
6325 addr = CALL_EXPR_FN (exp);
6326 STRIP_NOPS (addr);
6328 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6329 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6331 /* If we will emit code after the call, the call cannot be a tail call.
6332 If it is emitted as a tail call, a barrier is emitted after it, and
6333 then all trailing code is removed. */
6334 if (!ignore)
6335 CALL_EXPR_TAILCALL (exp) = 0;
6337 /* Expand the call here so we can emit trailing code. */
6338 ret = expand_call (exp, target, ignore);
6340 /* Replace the original function just in case it matters. */
6341 TREE_OPERAND (addr, 0) = fndecl;
6343 /* Then issue the arithmetic correction to return the right result. */
6344 if (!ignore)
6346 if (code == NOT)
6348 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6349 OPTAB_LIB_WIDEN);
6350 ret = expand_simple_unop (mode, NOT, ret, target, true);
6352 else
6353 ret = expand_simple_binop (mode, code, ret, val, target, true,
6354 OPTAB_LIB_WIDEN);
6356 return ret;
6359 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6361 void
6362 expand_ifn_atomic_bit_test_and (gcall *call)
6364 tree ptr = gimple_call_arg (call, 0);
6365 tree bit = gimple_call_arg (call, 1);
6366 tree flag = gimple_call_arg (call, 2);
6367 tree lhs = gimple_call_lhs (call);
6368 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6369 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6370 enum rtx_code code;
6371 optab optab;
6372 struct expand_operand ops[5];
6374 gcc_assert (flag_inline_atomics);
6376 if (gimple_call_num_args (call) == 4)
6377 model = get_memmodel (gimple_call_arg (call, 3));
6379 rtx mem = get_builtin_sync_mem (ptr, mode);
6380 rtx val = expand_expr_force_mode (bit, mode);
6382 switch (gimple_call_internal_fn (call))
6384 case IFN_ATOMIC_BIT_TEST_AND_SET:
6385 code = IOR;
6386 optab = atomic_bit_test_and_set_optab;
6387 break;
6388 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6389 code = XOR;
6390 optab = atomic_bit_test_and_complement_optab;
6391 break;
6392 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6393 code = AND;
6394 optab = atomic_bit_test_and_reset_optab;
6395 break;
6396 default:
6397 gcc_unreachable ();
6400 if (lhs == NULL_TREE)
6402 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6403 val, NULL_RTX, true, OPTAB_DIRECT);
6404 if (code == AND)
6405 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6406 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6407 return;
6410 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6411 enum insn_code icode = direct_optab_handler (optab, mode);
6412 gcc_assert (icode != CODE_FOR_nothing);
6413 create_output_operand (&ops[0], target, mode);
6414 create_fixed_operand (&ops[1], mem);
6415 create_convert_operand_to (&ops[2], val, mode, true);
6416 create_integer_operand (&ops[3], model);
6417 create_integer_operand (&ops[4], integer_onep (flag));
6418 if (maybe_expand_insn (icode, 5, ops))
6419 return;
6421 rtx bitval = val;
6422 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6423 val, NULL_RTX, true, OPTAB_DIRECT);
6424 rtx maskval = val;
6425 if (code == AND)
6426 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6427 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6428 code, model, false);
6429 if (integer_onep (flag))
6431 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6432 NULL_RTX, true, OPTAB_DIRECT);
6433 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6434 true, OPTAB_DIRECT);
6436 else
6437 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6438 OPTAB_DIRECT);
6439 if (result != target)
6440 emit_move_insn (target, result);
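/* Origin sketch (editorial): these internal functions are matched earlier
   from patterns of the shape

     bool set_bit (unsigned *p, unsigned bit)
     {
       unsigned mask = 1u << bit;
       return (__atomic_fetch_or (p, mask, __ATOMIC_SEQ_CST) & mask) != 0;
     }

   so the expander can use atomic_bit_test_and_set_optab directly instead
   of a full fetch_or followed by the shift/mask fixup coded above.  */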
6443 /* Expand an atomic clear operation.
6444 void __atomic_clear (BOOL *obj, enum memmodel)
6445 EXP is the call expression. */
6447 static rtx
6448 expand_builtin_atomic_clear (tree exp)
6450 machine_mode mode;
6451 rtx mem, ret;
6452 enum memmodel model;
6454 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6455 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6456 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6458 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6460 source_location loc
6461 = expansion_point_location_if_in_system_header (input_location);
6462 warning_at (loc, OPT_Winvalid_memory_model,
6463 "invalid memory model for %<__atomic_store%>");
6464 model = MEMMODEL_SEQ_CST;
6467 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6468 Failing both, a plain store is issued.  The only way this can
6469 fail is if the bool type is larger than a word size. Unlikely, but
6470 handle it anyway for completeness. Assume a single threaded model since
6471 there is no atomic support in this case, and no barriers are required. */
6472 ret = expand_atomic_store (mem, const0_rtx, model, true);
6473 if (!ret)
6474 emit_move_insn (mem, const0_rtx);
6475 return const0_rtx;
6478 /* Expand an atomic test_and_set operation.
6479 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6480 EXP is the call expression. */
6482 static rtx
6483 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6485 rtx mem;
6486 enum memmodel model;
6487 machine_mode mode;
6489 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6490 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6491 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6493 return expand_atomic_test_and_set (target, mem, model);
6497 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6498 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6500 static tree
6501 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6503 int size;
6504 machine_mode mode;
6505 unsigned int mode_align, type_align;
6507 if (TREE_CODE (arg0) != INTEGER_CST)
6508 return NULL_TREE;
6510 /* We need a corresponding integer mode for the access to be lock-free. */
6511 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6512 if (!int_mode_for_size (size, 0).exists (&mode))
6513 return boolean_false_node;
6515 mode_align = GET_MODE_ALIGNMENT (mode);
6517 if (TREE_CODE (arg1) == INTEGER_CST)
6519 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6521 /* Either this argument is null, or it's a fake pointer encoding
6522 the alignment of the object. */
6523 val = least_bit_hwi (val);
6524 val *= BITS_PER_UNIT;
6526 if (val == 0 || mode_align < val)
6527 type_align = mode_align;
6528 else
6529 type_align = val;
6531 else
6533 tree ttype = TREE_TYPE (arg1);
6535 /* This function is usually invoked and folded immediately by the front
6536 end before anything else has a chance to look at it. The pointer
6537 parameter at this point is usually cast to a void *, so check for that
6538 and look past the cast. */
6539 if (CONVERT_EXPR_P (arg1)
6540 && POINTER_TYPE_P (ttype)
6541 && VOID_TYPE_P (TREE_TYPE (ttype))
6542 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6543 arg1 = TREE_OPERAND (arg1, 0);
6545 ttype = TREE_TYPE (arg1);
6546 gcc_assert (POINTER_TYPE_P (ttype));
6548 /* Get the underlying type of the object. */
6549 ttype = TREE_TYPE (ttype);
6550 type_align = TYPE_ALIGN (ttype);
6553 /* If the object has smaller alignment, the lock free routines cannot
6554 be used. */
6555 if (type_align < mode_align)
6556 return boolean_false_node;
6558 /* Check if a compare_and_swap pattern exists for the mode which represents
6559 the required size. The pattern is not allowed to fail, so the existence
6560 of the pattern indicates support is present. Also require that an
6561 atomic load exists for the required size. */
6562 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6563 return boolean_true_node;
6564 else
6565 return boolean_false_node;
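/* Example (editorial): on a typical LP64 x86 target

     __atomic_always_lock_free (4, 0);           // true: SImode CAS exists
     __atomic_always_lock_free (4, (char *) 1);  // false: alignment only 1

   since a null ARG1 means "assume natural alignment for the size", while a
   constant fake pointer encodes the object's actual alignment.  */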
6568 /* Return true if the parameters to call EXP represent an object which will
6569 always generate lock free instructions. The first argument represents the
6570 size of the object, and the second parameter is a pointer to the object
6571 itself. If NULL is passed for the object, then the result is based on
6572 typical alignment for an object of the specified size. Otherwise return
6573 false. */
6575 static rtx
6576 expand_builtin_atomic_always_lock_free (tree exp)
6578 tree size;
6579 tree arg0 = CALL_EXPR_ARG (exp, 0);
6580 tree arg1 = CALL_EXPR_ARG (exp, 1);
6582 if (TREE_CODE (arg0) != INTEGER_CST)
6584 error ("non-constant argument 1 to __atomic_always_lock_free");
6585 return const0_rtx;
6588 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6589 if (size == boolean_true_node)
6590 return const1_rtx;
6591 return const0_rtx;
6594 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6595 is lock free on this architecture. */
6597 static tree
6598 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6600 if (!flag_inline_atomics)
6601 return NULL_TREE;
6603 /* If it isn't always lock free, don't generate a result. */
6604 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6605 return boolean_true_node;
6607 return NULL_TREE;
6610 /* Return true if the parameters to call EXP represent an object which will
6611 always generate lock free instructions. The first argument represents the
6612 size of the object, and the second parameter is a pointer to the object
6613 itself. If NULL is passed for the object, then the result is based on
6614 typical alignment for an object of the specified size. Otherwise return
6615 NULL. */
6617 static rtx
6618 expand_builtin_atomic_is_lock_free (tree exp)
6620 tree size;
6621 tree arg0 = CALL_EXPR_ARG (exp, 0);
6622 tree arg1 = CALL_EXPR_ARG (exp, 1);
6624 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6626 error ("non-integer argument 1 to __atomic_is_lock_free");
6627 return NULL_RTX;
6630 if (!flag_inline_atomics)
6631 return NULL_RTX;
6633 /* If the value is known at compile time, return the RTX for it. */
6634 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6635 if (size == boolean_true_node)
6636 return const1_rtx;
6638 return NULL_RTX;
6641 /* Expand the __atomic_thread_fence intrinsic:
6642 void __atomic_thread_fence (enum memmodel)
6643 EXP is the CALL_EXPR. */
6645 static void
6646 expand_builtin_atomic_thread_fence (tree exp)
6648 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6649 expand_mem_thread_fence (model);
6652 /* Expand the __atomic_signal_fence intrinsic:
6653 void __atomic_signal_fence (enum memmodel)
6654 EXP is the CALL_EXPR. */
6656 static void
6657 expand_builtin_atomic_signal_fence (tree exp)
6659 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6660 expand_mem_signal_fence (model);
6663 /* Expand the __sync_synchronize intrinsic. */
6665 static void
6666 expand_builtin_sync_synchronize (void)
6668 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6671 static rtx
6672 expand_builtin_thread_pointer (tree exp, rtx target)
6674 enum insn_code icode;
6675 if (!validate_arglist (exp, VOID_TYPE))
6676 return const0_rtx;
6677 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6678 if (icode != CODE_FOR_nothing)
6680 struct expand_operand op;
6681 /* If the target is not suitable then create a new target. */
6682 if (target == NULL_RTX
6683 || !REG_P (target)
6684 || GET_MODE (target) != Pmode)
6685 target = gen_reg_rtx (Pmode);
6686 create_output_operand (&op, target, Pmode);
6687 expand_insn (icode, 1, &op);
6688 return target;
6690 error ("__builtin_thread_pointer is not supported on this target");
6691 return const0_rtx;
6694 static void
6695 expand_builtin_set_thread_pointer (tree exp)
6697 enum insn_code icode;
6698 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6699 return;
6700 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6701 if (icode != CODE_FOR_nothing)
6703 struct expand_operand op;
6704 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6705 Pmode, EXPAND_NORMAL);
6706 create_input_operand (&op, val, Pmode);
6707 expand_insn (icode, 1, &op);
6708 return;
6710 error ("__builtin_set_thread_pointer is not supported on this target");
6714 /* Emit code to restore the current value of the stack. */
6716 static void
6717 expand_stack_restore (tree var)
6719 rtx_insn *prev;
6720 rtx sa = expand_normal (var);
6722 sa = convert_memory_address (Pmode, sa);
6724 prev = get_last_insn ();
6725 emit_stack_restore (SAVE_BLOCK, sa);
6727 record_new_stack_level ();
6729 fixup_args_size_notes (prev, get_last_insn (), 0);
6732 /* Emit code to save the current value of the stack. */
6734 static rtx
6735 expand_stack_save (void)
6737 rtx ret = NULL_RTX;
6739 emit_stack_save (SAVE_BLOCK, &ret);
6740 return ret;
6743 /* Emit code to get the OpenACC gang, worker or vector id or size. */
6745 static rtx
6746 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6748 const char *name;
6749 rtx fallback_retval;
6750 rtx_insn *(*gen_fn) (rtx, rtx);
6751 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6753 case BUILT_IN_GOACC_PARLEVEL_ID:
6754 name = "__builtin_goacc_parlevel_id";
6755 fallback_retval = const0_rtx;
6756 gen_fn = targetm.gen_oacc_dim_pos;
6757 break;
6758 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6759 name = "__builtin_goacc_parlevel_size";
6760 fallback_retval = const1_rtx;
6761 gen_fn = targetm.gen_oacc_dim_size;
6762 break;
6763 default:
6764 gcc_unreachable ();
6767 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6769 error ("%qs only supported in OpenACC code", name);
6770 return const0_rtx;
6773 tree arg = CALL_EXPR_ARG (exp, 0);
6774 if (TREE_CODE (arg) != INTEGER_CST)
6776 error ("non-constant argument 0 to %qs", name);
6777 return const0_rtx;
6780 int dim = TREE_INT_CST_LOW (arg);
6781 switch (dim)
6783 case GOMP_DIM_GANG:
6784 case GOMP_DIM_WORKER:
6785 case GOMP_DIM_VECTOR:
6786 break;
6787 default:
6788 error ("illegal argument 0 to %qs", name);
6789 return const0_rtx;
6792 if (ignore)
6793 return target;
6795 if (target == NULL_RTX)
6796 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6798 if (!targetm.have_oacc_dim_size ())
6800 emit_move_insn (target, fallback_retval);
6801 return target;
6804 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6805 emit_insn (gen_fn (reg, GEN_INT (dim)));
6806 if (reg != target)
6807 emit_move_insn (target, reg);
6809 return target;
6812 /* Expand a string compare operation using a sequence of char comparisons
6813 to get rid of the calling overhead, with the result going to TARGET if
6814 that's convenient.
6816 VAR_STR is the variable string source;
6817 CONST_STR is the constant string source;
6818 LENGTH is the number of chars to compare;
6819 CONST_STR_N indicates which source string is the constant string;
6820 IS_MEMCMP indicates whether it's a memcmp or strcmp.
6822 Expands to: (assume const_str_n is 2, i.e., arg2 is a constant string)
6824 target = (int) (unsigned char) var_str[0]
6825 - (int) (unsigned char) const_str[0];
6826 if (target != 0)
6827 goto ne_label;
6828 ...
6829 target = (int) (unsigned char) var_str[length - 2]
6830 - (int) (unsigned char) const_str[length - 2];
6831 if (target != 0)
6832 goto ne_label;
6833 target = (int) (unsigned char) var_str[length - 1]
6834 - (int) (unsigned char) const_str[length - 1];
6835 ne_label:
6838 static rtx
6839 inline_string_cmp (rtx target, tree var_str, const char *const_str,
6840 unsigned HOST_WIDE_INT length,
6841 int const_str_n, machine_mode mode)
6843 HOST_WIDE_INT offset = 0;
6844 rtx var_rtx_array
6845 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
6846 rtx var_rtx = NULL_RTX;
6847 rtx const_rtx = NULL_RTX;
6848 rtx result = target ? target : gen_reg_rtx (mode);
6849 rtx_code_label *ne_label = gen_label_rtx ();
6850 tree unit_type_node = unsigned_char_type_node;
6851 scalar_int_mode unit_mode
6852 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
6854 start_sequence ();
6856 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6858 var_rtx
6859 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6860 const_rtx = c_readstr (const_str + offset, unit_mode);
6861 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6862 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6864 op0 = convert_modes (mode, unit_mode, op0, 1);
6865 op1 = convert_modes (mode, unit_mode, op1, 1);
6866 result = expand_simple_binop (mode, MINUS, op0, op1,
6867 result, 1, OPTAB_WIDEN);
6868 if (i < length - 1)
6869 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6870 mode, true, ne_label);
6871 offset += GET_MODE_SIZE (unit_mode);
6874 emit_label (ne_label);
6875 rtx_insn *insns = get_insns ();
6876 end_sequence ();
6877 emit_insn (insns);
6879 return result;
6882 /* Inline expansion of a call to str(n)cmp or memcmp, with the result going
6883 to TARGET if that's convenient.
6884 If the call is not inlined, return NULL_RTX. */
6885 static rtx
6886 inline_expand_builtin_string_cmp (tree exp, rtx target)
6888 tree fndecl = get_callee_fndecl (exp);
6889 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6890 unsigned HOST_WIDE_INT length = 0;
6891 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6893 /* Do NOT apply this inlining expansion when optimizing for size or
6894 at an optimization level below 2. */
6895 if (optimize < 2 || optimize_insn_for_size_p ())
6896 return NULL_RTX;
6898 gcc_checking_assert (fcode == BUILT_IN_STRCMP
6899 || fcode == BUILT_IN_STRNCMP
6900 || fcode == BUILT_IN_MEMCMP);
6902 /* On a target where the type of the call (int) has the same or narrower
6903 precision than unsigned char, give up on the inlining expansion. */
6904 if (TYPE_PRECISION (unsigned_char_type_node)
6905 >= TYPE_PRECISION (TREE_TYPE (exp)))
6906 return NULL_RTX;
6908 tree arg1 = CALL_EXPR_ARG (exp, 0);
6909 tree arg2 = CALL_EXPR_ARG (exp, 1);
6910 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6912 unsigned HOST_WIDE_INT len1 = 0;
6913 unsigned HOST_WIDE_INT len2 = 0;
6914 unsigned HOST_WIDE_INT len3 = 0;
6916 const char *src_str1 = c_getstr (arg1, &len1);
6917 const char *src_str2 = c_getstr (arg2, &len2);
6919 /* If neither string is a constant string, the call does not qualify. */
6920 if (!src_str1 && !src_str2)
6921 return NULL_RTX;
6923 /* For strncmp, the call does not qualify if the length is not a constant. */
6924 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
6925 return NULL_RTX;
6927 int const_str_n = 0;
6928 if (!len1)
6929 const_str_n = 2;
6930 else if (!len2)
6931 const_str_n = 1;
6932 else if (len2 > len1)
6933 const_str_n = 1;
6934 else
6935 const_str_n = 2;
6937 gcc_checking_assert (const_str_n > 0);
6938 length = (const_str_n == 1) ? len1 : len2;
6940 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
6941 length = len3;
6943 /* If the length of the comparison is larger than the threshold,
6944 do nothing. */
6945 if (length > (unsigned HOST_WIDE_INT)
6946 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
6947 return NULL_RTX;
6949 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6951 /* Now expand the call inline. */
6952 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
6953 (const_str_n == 1) ? src_str1 : src_str2, length,
6954 const_str_n, mode);
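/* Example (editorial sketch): at -O2, with the length within the
   builtin-string-cmp-inline-length parameter, a call such as

     int cmp (const char *s)
     {
       return __builtin_strcmp (s, "ab");   // one constant argument
     }

   qualifies and becomes a short run of byte compares via inline_string_cmp
   above; non-constant or over-threshold cases return NULL_RTX and keep the
   library call.  */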
6957 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
6958 represents the size of the first argument to that call, or VOIDmode
6959 if the argument is a pointer. IGNORE will be true if the result
6960 isn't used. */
6961 static rtx
6962 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
6963 bool ignore)
6965 rtx val, failsafe;
6966 unsigned nargs = call_expr_nargs (exp);
6968 tree arg0 = CALL_EXPR_ARG (exp, 0);
6970 if (mode == VOIDmode)
6972 mode = TYPE_MODE (TREE_TYPE (arg0));
6973 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
6976 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
6978 /* An optional second argument can be used as a failsafe value on
6979 some machines. If it isn't present, then the failsafe value is
6980 assumed to be 0. */
6981 if (nargs > 1)
6983 tree arg1 = CALL_EXPR_ARG (exp, 1);
6984 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
6986 else
6987 failsafe = const0_rtx;
6989 /* If the result isn't used, the behavior is undefined. It would be
6990 nice to emit a warning here, but path splitting means this might
6991 happen with legitimate code. So simply drop the builtin
6992 expansion in that case; we've handled any side-effects above. */
6993 if (ignore)
6994 return const0_rtx;
6996 /* If we don't have a suitable target, create one to hold the result. */
6997 if (target == NULL || GET_MODE (target) != mode)
6998 target = gen_reg_rtx (mode);
7000 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7001 val = convert_modes (mode, VOIDmode, val, false);
7003 return targetm.speculation_safe_value (mode, target, val, failsafe);
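/* Usage sketch (editorial): intended for Spectre-variant-1 mitigation:

     int load_guarded (int *base, unsigned idx, unsigned bound)
     {
       if (idx < bound)
	 return base[__builtin_speculation_safe_value (idx)];
       return 0;
     }

   The target hook then emits its barrier or value-clamping sequence, with
   FAILSAFE (0 by default) as the value supplied under misspeculation on
   targets that work that way.  */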
7006 /* Expand an expression EXP that calls a built-in function,
7007 with result going to TARGET if that's convenient
7008 (and in mode MODE if that's convenient).
7009 SUBTARGET may be used as the target for computing one of EXP's operands.
7010 IGNORE is nonzero if the value is to be ignored. */
7012 rtx
7013 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7014 int ignore)
7016 tree fndecl = get_callee_fndecl (exp);
7017 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7018 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7019 int flags;
7021 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7022 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7024 /* When ASan is enabled, we don't want to expand some memory/string
7025 builtins and rely on libsanitizer's hooks. This allows us to avoid
7026 redundant checks and be sure that a possible overflow will be detected
7027 by ASan. */
7029 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7030 return expand_call (exp, target, ignore);
7032 /* When not optimizing, generate calls to library functions for a certain
7033 set of builtins. */
7034 if (!optimize
7035 && !called_as_built_in (fndecl)
7036 && fcode != BUILT_IN_FORK
7037 && fcode != BUILT_IN_EXECL
7038 && fcode != BUILT_IN_EXECV
7039 && fcode != BUILT_IN_EXECLP
7040 && fcode != BUILT_IN_EXECLE
7041 && fcode != BUILT_IN_EXECVP
7042 && fcode != BUILT_IN_EXECVE
7043 && !ALLOCA_FUNCTION_CODE_P (fcode)
7044 && fcode != BUILT_IN_FREE)
7045 return expand_call (exp, target, ignore);
7047 /* The built-in function expanders test for target == const0_rtx
7048 to determine whether the function's result will be ignored. */
7049 if (ignore)
7050 target = const0_rtx;
7052 /* If the result of a pure or const built-in function is ignored, and
7053 none of its arguments are volatile, we can avoid expanding the
7054 built-in call and just evaluate the arguments for side-effects. */
7055 if (target == const0_rtx
7056 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7057 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7059 bool volatilep = false;
7060 tree arg;
7061 call_expr_arg_iterator iter;
7063 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7064 if (TREE_THIS_VOLATILE (arg))
7066 volatilep = true;
7067 break;
7070 if (! volatilep)
7072 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7073 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7074 return const0_rtx;
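/* E.g. (void) __builtin_strlen (s) reaches this point and expands to
   no code at all, while (void) __builtin_strlen (f ()) still expands
   the hypothetical call f () for its side effects.  */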
7078 switch (fcode)
7080 CASE_FLT_FN (BUILT_IN_FABS):
7081 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7082 case BUILT_IN_FABSD32:
7083 case BUILT_IN_FABSD64:
7084 case BUILT_IN_FABSD128:
7085 target = expand_builtin_fabs (exp, target, subtarget);
7086 if (target)
7087 return target;
7088 break;
7090 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7091 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7092 target = expand_builtin_copysign (exp, target, subtarget);
7093 if (target)
7094 return target;
7095 break;
7097 /* Just do a normal library call if we were unable to fold
7098 the values. */
7099 CASE_FLT_FN (BUILT_IN_CABS):
7100 break;
7102 CASE_FLT_FN (BUILT_IN_FMA):
7103 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7104 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7105 if (target)
7106 return target;
7107 break;
7109 CASE_FLT_FN (BUILT_IN_ILOGB):
7110 if (! flag_unsafe_math_optimizations)
7111 break;
7112 gcc_fallthrough ();
7113 CASE_FLT_FN (BUILT_IN_ISINF):
7114 CASE_FLT_FN (BUILT_IN_FINITE):
7115 case BUILT_IN_ISFINITE:
7116 case BUILT_IN_ISNORMAL:
7117 target = expand_builtin_interclass_mathfn (exp, target);
7118 if (target)
7119 return target;
7120 break;
7122 CASE_FLT_FN (BUILT_IN_ICEIL):
7123 CASE_FLT_FN (BUILT_IN_LCEIL):
7124 CASE_FLT_FN (BUILT_IN_LLCEIL):
7125 CASE_FLT_FN (BUILT_IN_LFLOOR):
7126 CASE_FLT_FN (BUILT_IN_IFLOOR):
7127 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7128 target = expand_builtin_int_roundingfn (exp, target);
7129 if (target)
7130 return target;
7131 break;
7133 CASE_FLT_FN (BUILT_IN_IRINT):
7134 CASE_FLT_FN (BUILT_IN_LRINT):
7135 CASE_FLT_FN (BUILT_IN_LLRINT):
7136 CASE_FLT_FN (BUILT_IN_IROUND):
7137 CASE_FLT_FN (BUILT_IN_LROUND):
7138 CASE_FLT_FN (BUILT_IN_LLROUND):
7139 target = expand_builtin_int_roundingfn_2 (exp, target);
7140 if (target)
7141 return target;
7142 break;
7144 CASE_FLT_FN (BUILT_IN_POWI):
7145 target = expand_builtin_powi (exp, target);
7146 if (target)
7147 return target;
7148 break;
7150 CASE_FLT_FN (BUILT_IN_CEXPI):
7151 target = expand_builtin_cexpi (exp, target);
7152 gcc_assert (target);
7153 return target;
7155 CASE_FLT_FN (BUILT_IN_SIN):
7156 CASE_FLT_FN (BUILT_IN_COS):
7157 if (! flag_unsafe_math_optimizations)
7158 break;
7159 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7160 if (target)
7161 return target;
7162 break;
7164 CASE_FLT_FN (BUILT_IN_SINCOS):
7165 if (! flag_unsafe_math_optimizations)
7166 break;
7167 target = expand_builtin_sincos (exp);
7168 if (target)
7169 return target;
7170 break;
7172 case BUILT_IN_APPLY_ARGS:
7173 return expand_builtin_apply_args ();
7175 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7176 FUNCTION with a copy of the parameters described by
7177 ARGUMENTS, and ARGSIZE. It returns a block of memory
7178 allocated on the stack into which is stored all the registers
7179 that might possibly be used for returning the result of a
7180 function. ARGUMENTS is the value returned by
7181 __builtin_apply_args. ARGSIZE is the number of bytes of
7182 arguments that must be copied. ??? How should this value be
7183 computed? We'll also need a safe worst case value for varargs
7184 functions. */
7185 case BUILT_IN_APPLY:
7186 if (!validate_arglist (exp, POINTER_TYPE,
7187 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7188 && !validate_arglist (exp, REFERENCE_TYPE,
7189 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7190 return const0_rtx;
7191 else
7193 rtx ops[3];
7195 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7196 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7197 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7199 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7202 /* __builtin_return (RESULT) causes the function to return the
7203 value described by RESULT. RESULT is address of the block of
7204 memory returned by __builtin_apply. */
7205 case BUILT_IN_RETURN:
7206 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7207 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7208 return const0_rtx;
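/* Illustrative sketch (not part of GCC) of how the three builtins
   above compose in user code to forward a call unchanged; TARGET_FN
   and the 64-byte argument-size guess are hypothetical.

     void forwarder (int a, int b)
     {
       __builtin_return (__builtin_apply ((void (*) ()) target_fn,
                                          __builtin_apply_args (),
                                          64));
     }
*/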
7210 case BUILT_IN_SAVEREGS:
7211 return expand_builtin_saveregs ();
7213 case BUILT_IN_VA_ARG_PACK:
7214 /* All valid uses of __builtin_va_arg_pack () are removed during
7215 inlining. */
7216 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7217 return const0_rtx;
7219 case BUILT_IN_VA_ARG_PACK_LEN:
7220 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7221 inlining. */
7222 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7223 return const0_rtx;
7225 /* Return the address of the first anonymous stack arg. */
7226 case BUILT_IN_NEXT_ARG:
7227 if (fold_builtin_next_arg (exp, false))
7228 return const0_rtx;
7229 return expand_builtin_next_arg ();
7231 case BUILT_IN_CLEAR_CACHE:
7232 target = expand_builtin___clear_cache (exp);
7233 if (target)
7234 return target;
7235 break;
7237 case BUILT_IN_CLASSIFY_TYPE:
7238 return expand_builtin_classify_type (exp);
7240 case BUILT_IN_CONSTANT_P:
7241 return const0_rtx;
7243 case BUILT_IN_FRAME_ADDRESS:
7244 case BUILT_IN_RETURN_ADDRESS:
7245 return expand_builtin_frame_address (fndecl, exp);
7247 /* Returns the address of the area where the structure is returned.
7248 0 otherwise. */
7249 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7250 if (call_expr_nargs (exp) != 0
7251 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7252 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7253 return const0_rtx;
7254 else
7255 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7257 CASE_BUILT_IN_ALLOCA:
7258 target = expand_builtin_alloca (exp);
7259 if (target)
7260 return target;
7261 break;
7263 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7264 return expand_asan_emit_allocas_unpoison (exp);
7266 case BUILT_IN_STACK_SAVE:
7267 return expand_stack_save ();
7269 case BUILT_IN_STACK_RESTORE:
7270 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7271 return const0_rtx;
7273 case BUILT_IN_BSWAP16:
7274 case BUILT_IN_BSWAP32:
7275 case BUILT_IN_BSWAP64:
7276 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7277 if (target)
7278 return target;
7279 break;
7281 CASE_INT_FN (BUILT_IN_FFS):
7282 target = expand_builtin_unop (target_mode, exp, target,
7283 subtarget, ffs_optab);
7284 if (target)
7285 return target;
7286 break;
7288 CASE_INT_FN (BUILT_IN_CLZ):
7289 target = expand_builtin_unop (target_mode, exp, target,
7290 subtarget, clz_optab);
7291 if (target)
7292 return target;
7293 break;
7295 CASE_INT_FN (BUILT_IN_CTZ):
7296 target = expand_builtin_unop (target_mode, exp, target,
7297 subtarget, ctz_optab);
7298 if (target)
7299 return target;
7300 break;
7302 CASE_INT_FN (BUILT_IN_CLRSB):
7303 target = expand_builtin_unop (target_mode, exp, target,
7304 subtarget, clrsb_optab);
7305 if (target)
7306 return target;
7307 break;
7309 CASE_INT_FN (BUILT_IN_POPCOUNT):
7310 target = expand_builtin_unop (target_mode, exp, target,
7311 subtarget, popcount_optab);
7312 if (target)
7313 return target;
7314 break;
7316 CASE_INT_FN (BUILT_IN_PARITY):
7317 target = expand_builtin_unop (target_mode, exp, target,
7318 subtarget, parity_optab);
7319 if (target)
7320 return target;
7321 break;
7323 case BUILT_IN_STRLEN:
7324 target = expand_builtin_strlen (exp, target, target_mode);
7325 if (target)
7326 return target;
7327 break;
7329 case BUILT_IN_STRNLEN:
7330 target = expand_builtin_strnlen (exp, target, target_mode);
7331 if (target)
7332 return target;
7333 break;
7335 case BUILT_IN_STRCAT:
7336 target = expand_builtin_strcat (exp, target);
7337 if (target)
7338 return target;
7339 break;
7341 case BUILT_IN_STRCPY:
7342 target = expand_builtin_strcpy (exp, target);
7343 if (target)
7344 return target;
7345 break;
7347 case BUILT_IN_STRNCAT:
7348 target = expand_builtin_strncat (exp, target);
7349 if (target)
7350 return target;
7351 break;
7353 case BUILT_IN_STRNCPY:
7354 target = expand_builtin_strncpy (exp, target);
7355 if (target)
7356 return target;
7357 break;
7359 case BUILT_IN_STPCPY:
7360 target = expand_builtin_stpcpy (exp, target, mode);
7361 if (target)
7362 return target;
7363 break;
7365 case BUILT_IN_STPNCPY:
7366 target = expand_builtin_stpncpy (exp, target);
7367 if (target)
7368 return target;
7369 break;
7371 case BUILT_IN_MEMCHR:
7372 target = expand_builtin_memchr (exp, target);
7373 if (target)
7374 return target;
7375 break;
7377 case BUILT_IN_MEMCPY:
7378 target = expand_builtin_memcpy (exp, target);
7379 if (target)
7380 return target;
7381 break;
7383 case BUILT_IN_MEMMOVE:
7384 target = expand_builtin_memmove (exp, target);
7385 if (target)
7386 return target;
7387 break;
7389 case BUILT_IN_MEMPCPY:
7390 target = expand_builtin_mempcpy (exp, target);
7391 if (target)
7392 return target;
7393 break;
7395 case BUILT_IN_MEMSET:
7396 target = expand_builtin_memset (exp, target, mode);
7397 if (target)
7398 return target;
7399 break;
7401 case BUILT_IN_BZERO:
7402 target = expand_builtin_bzero (exp);
7403 if (target)
7404 return target;
7405 break;
7407 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7408 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7409 when changing it to a strcmp call. */
7410 case BUILT_IN_STRCMP_EQ:
7411 target = expand_builtin_memcmp (exp, target, true);
7412 if (target)
7413 return target;
7415 /* Change this call back to a BUILT_IN_STRCMP. */
7416 TREE_OPERAND (exp, 1)
7417 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7419 /* Delete the last parameter. */
7420 unsigned int i;
7421 vec<tree, va_gc> *arg_vec;
7422 vec_alloc (arg_vec, 2);
7423 for (i = 0; i < 2; i++)
7424 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7425 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7426 /* FALLTHROUGH */
7428 case BUILT_IN_STRCMP:
7429 target = expand_builtin_strcmp (exp, target);
7430 if (target)
7431 return target;
7432 break;
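/* For reference: BUILT_IN_STRCMP_EQ is created earlier in the middle
   end when a strcmp result is only tested for equality with zero and
   one argument is a known string; its extra third argument is a
   length bound that lets it expand like memcmp.  Illustratively,

     strcmp (s, "ab") == 0  becomes  __builtin_strcmp_eq (s, "ab", 3)

   and the code above strips that third argument again when falling
   back to a plain strcmp call.  */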
7434 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7435 back to a BUILT_IN_STRNCMP. */
7436 case BUILT_IN_STRNCMP_EQ:
7437 target = expand_builtin_memcmp (exp, target, true);
7438 if (target)
7439 return target;
7441 /* Change it back to a BUILT_IN_STRNCMP. */
7442 TREE_OPERAND (exp, 1)
7443 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7444 /* FALLTHROUGH */
7446 case BUILT_IN_STRNCMP:
7447 target = expand_builtin_strncmp (exp, target, mode);
7448 if (target)
7449 return target;
7450 break;
7452 case BUILT_IN_BCMP:
7453 case BUILT_IN_MEMCMP:
7454 case BUILT_IN_MEMCMP_EQ:
7455 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7456 if (target)
7457 return target;
7458 if (fcode == BUILT_IN_MEMCMP_EQ)
7460 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7461 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7463 break;
7465 case BUILT_IN_SETJMP:
7466 /* This should have been lowered to the builtins below. */
7467 gcc_unreachable ();
7469 case BUILT_IN_SETJMP_SETUP:
7470 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7471 and the receiver label. */
7472 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7474 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7475 VOIDmode, EXPAND_NORMAL);
7476 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7477 rtx_insn *label_r = label_rtx (label);
7479 /* This is copied from the handling of non-local gotos. */
7480 expand_builtin_setjmp_setup (buf_addr, label_r);
7481 nonlocal_goto_handler_labels
7482 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7483 nonlocal_goto_handler_labels);
7484 /* ??? Do not let expand_label treat us as such since we would
7485 not want to be both on the list of non-local labels and on
7486 the list of forced labels. */
7487 FORCED_LABEL (label) = 0;
7488 return const0_rtx;
7490 break;
7492 case BUILT_IN_SETJMP_RECEIVER:
7493 /* __builtin_setjmp_receiver is passed the receiver label. */
7494 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7496 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7497 rtx_insn *label_r = label_rtx (label);
7499 expand_builtin_setjmp_receiver (label_r);
7500 return const0_rtx;
7502 break;
7504 /* __builtin_longjmp is passed a pointer to an array of five words.
7505 It's similar to the C library longjmp function but works with
7506 __builtin_setjmp above. */
7507 case BUILT_IN_LONGJMP:
7508 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7510 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7511 VOIDmode, EXPAND_NORMAL);
7512 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7514 if (value != const1_rtx)
7516 error ("%<__builtin_longjmp%> second argument must be 1");
7517 return const0_rtx;
7520 expand_builtin_longjmp (buf_addr, value);
7521 return const0_rtx;
7523 break;
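/* Illustrative sketch (not part of GCC) of the builtin pair in user
   code; BUF is the five-word buffer both comments above refer to, and
   DO_WORK / HANDLE_UNWIND are hypothetical.  Note the second argument
   of __builtin_longjmp must be the literal 1, as enforced above.

     static intptr_t buf[5];

     void f (void)
     {
       if (__builtin_setjmp (buf) == 0)
         do_work ();
       else
         handle_unwind ();
     }

     void bail_out (void)
     {
       __builtin_longjmp (buf, 1);
     }
*/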
7525 case BUILT_IN_NONLOCAL_GOTO:
7526 target = expand_builtin_nonlocal_goto (exp);
7527 if (target)
7528 return target;
7529 break;
7531 /* This updates the setjmp buffer that is its argument with the value
7532 of the current stack pointer. */
7533 case BUILT_IN_UPDATE_SETJMP_BUF:
7534 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7536 rtx buf_addr
7537 = expand_normal (CALL_EXPR_ARG (exp, 0));
7539 expand_builtin_update_setjmp_buf (buf_addr);
7540 return const0_rtx;
7542 break;
7544 case BUILT_IN_TRAP:
7545 expand_builtin_trap ();
7546 return const0_rtx;
7548 case BUILT_IN_UNREACHABLE:
7549 expand_builtin_unreachable ();
7550 return const0_rtx;
7552 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7553 case BUILT_IN_SIGNBITD32:
7554 case BUILT_IN_SIGNBITD64:
7555 case BUILT_IN_SIGNBITD128:
7556 target = expand_builtin_signbit (exp, target);
7557 if (target)
7558 return target;
7559 break;
7561 /* Various hooks for the DWARF 2 __throw routine. */
7562 case BUILT_IN_UNWIND_INIT:
7563 expand_builtin_unwind_init ();
7564 return const0_rtx;
7565 case BUILT_IN_DWARF_CFA:
7566 return virtual_cfa_rtx;
7567 #ifdef DWARF2_UNWIND_INFO
7568 case BUILT_IN_DWARF_SP_COLUMN:
7569 return expand_builtin_dwarf_sp_column ();
7570 case BUILT_IN_INIT_DWARF_REG_SIZES:
7571 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7572 return const0_rtx;
7573 #endif
7574 case BUILT_IN_FROB_RETURN_ADDR:
7575 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7576 case BUILT_IN_EXTRACT_RETURN_ADDR:
7577 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7578 case BUILT_IN_EH_RETURN:
7579 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7580 CALL_EXPR_ARG (exp, 1));
7581 return const0_rtx;
7582 case BUILT_IN_EH_RETURN_DATA_REGNO:
7583 return expand_builtin_eh_return_data_regno (exp);
7584 case BUILT_IN_EXTEND_POINTER:
7585 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7586 case BUILT_IN_EH_POINTER:
7587 return expand_builtin_eh_pointer (exp);
7588 case BUILT_IN_EH_FILTER:
7589 return expand_builtin_eh_filter (exp);
7590 case BUILT_IN_EH_COPY_VALUES:
7591 return expand_builtin_eh_copy_values (exp);
7593 case BUILT_IN_VA_START:
7594 return expand_builtin_va_start (exp);
7595 case BUILT_IN_VA_END:
7596 return expand_builtin_va_end (exp);
7597 case BUILT_IN_VA_COPY:
7598 return expand_builtin_va_copy (exp);
7599 case BUILT_IN_EXPECT:
7600 return expand_builtin_expect (exp, target);
7601 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7602 return expand_builtin_expect_with_probability (exp, target);
7603 case BUILT_IN_ASSUME_ALIGNED:
7604 return expand_builtin_assume_aligned (exp, target);
7605 case BUILT_IN_PREFETCH:
7606 expand_builtin_prefetch (exp);
7607 return const0_rtx;
7609 case BUILT_IN_INIT_TRAMPOLINE:
7610 return expand_builtin_init_trampoline (exp, true);
7611 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7612 return expand_builtin_init_trampoline (exp, false);
7613 case BUILT_IN_ADJUST_TRAMPOLINE:
7614 return expand_builtin_adjust_trampoline (exp);
7616 case BUILT_IN_INIT_DESCRIPTOR:
7617 return expand_builtin_init_descriptor (exp);
7618 case BUILT_IN_ADJUST_DESCRIPTOR:
7619 return expand_builtin_adjust_descriptor (exp);
7621 case BUILT_IN_FORK:
7622 case BUILT_IN_EXECL:
7623 case BUILT_IN_EXECV:
7624 case BUILT_IN_EXECLP:
7625 case BUILT_IN_EXECLE:
7626 case BUILT_IN_EXECVP:
7627 case BUILT_IN_EXECVE:
7628 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7629 if (target)
7630 return target;
7631 break;
7633 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7634 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7635 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7636 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7637 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7638 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7639 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7640 if (target)
7641 return target;
7642 break;
7644 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7645 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7646 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7647 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7648 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7649 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7650 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7651 if (target)
7652 return target;
7653 break;
7655 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7656 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7657 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7658 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7659 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7660 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7661 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7662 if (target)
7663 return target;
7664 break;
7666 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7667 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7668 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7669 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7670 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7671 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7672 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7673 if (target)
7674 return target;
7675 break;
7677 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7678 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7679 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7680 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7681 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7682 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7683 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7684 if (target)
7685 return target;
7686 break;
7688 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7689 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7690 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7691 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7692 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7693 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7694 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7695 if (target)
7696 return target;
7697 break;
7699 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7700 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7701 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7702 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7703 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7704 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7705 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7706 if (target)
7707 return target;
7708 break;
7710 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7711 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7712 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7713 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7714 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7715 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7716 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7717 if (target)
7718 return target;
7719 break;
7721 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7722 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7723 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7724 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7725 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7726 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7727 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7728 if (target)
7729 return target;
7730 break;
7732 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7733 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7734 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7735 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7736 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7737 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7738 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7739 if (target)
7740 return target;
7741 break;
7743 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7744 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7745 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7746 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7747 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7748 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7749 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7750 if (target)
7751 return target;
7752 break;
7754 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7755 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7756 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7757 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7758 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7759 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7760 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7761 if (target)
7762 return target;
7763 break;
7765 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7766 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7767 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7768 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7769 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7770 if (mode == VOIDmode)
7771 mode = TYPE_MODE (boolean_type_node);
7772 if (!target || !register_operand (target, mode))
7773 target = gen_reg_rtx (mode);
7775 mode = get_builtin_sync_mode
7776 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7777 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7778 if (target)
7779 return target;
7780 break;
7782 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7783 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7784 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7785 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7786 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7787 mode = get_builtin_sync_mode
7788 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7789 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7790 if (target)
7791 return target;
7792 break;
7794 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7795 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7796 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7797 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7798 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7799 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7800 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7801 if (target)
7802 return target;
7803 break;
7805 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7806 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7807 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7808 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7809 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7810 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7811 expand_builtin_sync_lock_release (mode, exp);
7812 return const0_rtx;
7814 case BUILT_IN_SYNC_SYNCHRONIZE:
7815 expand_builtin_sync_synchronize ();
7816 return const0_rtx;
7818 case BUILT_IN_ATOMIC_EXCHANGE_1:
7819 case BUILT_IN_ATOMIC_EXCHANGE_2:
7820 case BUILT_IN_ATOMIC_EXCHANGE_4:
7821 case BUILT_IN_ATOMIC_EXCHANGE_8:
7822 case BUILT_IN_ATOMIC_EXCHANGE_16:
7823 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7824 target = expand_builtin_atomic_exchange (mode, exp, target);
7825 if (target)
7826 return target;
7827 break;
7829 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7830 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7831 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7832 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7833 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7835 unsigned int nargs, z;
7836 vec<tree, va_gc> *vec;
7838 mode =
7839 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7840 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7841 if (target)
7842 return target;
7844 /* If this is turned into an external library call, the weak parameter
7845 must be dropped to match the expected parameter list. */
7846 nargs = call_expr_nargs (exp);
7847 vec_alloc (vec, nargs - 1);
7848 for (z = 0; z < 3; z++)
7849 vec->quick_push (CALL_EXPR_ARG (exp, z));
7850 /* Skip the boolean weak parameter. */
7851 for (z = 4; z < 6; z++)
7852 vec->quick_push (CALL_EXPR_ARG (exp, z));
7853 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7854 break;
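/* For reference, the parameter lists being matched up above are
   roughly:

     builtin:  __atomic_compare_exchange_N (ptr, expected, desired,
                                            weak, success, failure)
     library:  __atomic_compare_exchange_N (ptr, expected, desired,
                                            success, failure)

   hence arguments 0-2 and 4-5 are copied and index 3 (WEAK) is
   skipped.  */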
7857 case BUILT_IN_ATOMIC_LOAD_1:
7858 case BUILT_IN_ATOMIC_LOAD_2:
7859 case BUILT_IN_ATOMIC_LOAD_4:
7860 case BUILT_IN_ATOMIC_LOAD_8:
7861 case BUILT_IN_ATOMIC_LOAD_16:
7862 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7863 target = expand_builtin_atomic_load (mode, exp, target);
7864 if (target)
7865 return target;
7866 break;
7868 case BUILT_IN_ATOMIC_STORE_1:
7869 case BUILT_IN_ATOMIC_STORE_2:
7870 case BUILT_IN_ATOMIC_STORE_4:
7871 case BUILT_IN_ATOMIC_STORE_8:
7872 case BUILT_IN_ATOMIC_STORE_16:
7873 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7874 target = expand_builtin_atomic_store (mode, exp);
7875 if (target)
7876 return const0_rtx;
7877 break;
7879 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7880 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7881 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7882 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7883 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7885 enum built_in_function lib;
7886 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7887 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7888 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7889 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7890 ignore, lib);
7891 if (target)
7892 return target;
7893 break;
7895 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7896 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7897 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7898 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7899 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7901 enum built_in_function lib;
7902 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7903 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7904 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7905 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7906 ignore, lib);
7907 if (target)
7908 return target;
7909 break;
7911 case BUILT_IN_ATOMIC_AND_FETCH_1:
7912 case BUILT_IN_ATOMIC_AND_FETCH_2:
7913 case BUILT_IN_ATOMIC_AND_FETCH_4:
7914 case BUILT_IN_ATOMIC_AND_FETCH_8:
7915 case BUILT_IN_ATOMIC_AND_FETCH_16:
7917 enum built_in_function lib;
7918 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7919 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7920 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7921 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7922 ignore, lib);
7923 if (target)
7924 return target;
7925 break;
7927 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7928 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7929 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7930 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7931 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7933 enum built_in_function lib;
7934 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7935 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7936 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7937 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7938 ignore, lib);
7939 if (target)
7940 return target;
7941 break;
7943 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7944 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7945 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7946 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7947 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7949 enum built_in_function lib;
7950 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7951 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7952 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7953 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7954 ignore, lib);
7955 if (target)
7956 return target;
7957 break;
7959 case BUILT_IN_ATOMIC_OR_FETCH_1:
7960 case BUILT_IN_ATOMIC_OR_FETCH_2:
7961 case BUILT_IN_ATOMIC_OR_FETCH_4:
7962 case BUILT_IN_ATOMIC_OR_FETCH_8:
7963 case BUILT_IN_ATOMIC_OR_FETCH_16:
7965 enum built_in_function lib;
7966 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7967 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7968 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7969 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7970 ignore, lib);
7971 if (target)
7972 return target;
7973 break;
7975 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7976 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7977 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7978 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7979 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7980 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7981 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7982 ignore, BUILT_IN_NONE);
7983 if (target)
7984 return target;
7985 break;
7987 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7988 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7989 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7990 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7991 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7992 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7993 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7994 ignore, BUILT_IN_NONE);
7995 if (target)
7996 return target;
7997 break;
7999 case BUILT_IN_ATOMIC_FETCH_AND_1:
8000 case BUILT_IN_ATOMIC_FETCH_AND_2:
8001 case BUILT_IN_ATOMIC_FETCH_AND_4:
8002 case BUILT_IN_ATOMIC_FETCH_AND_8:
8003 case BUILT_IN_ATOMIC_FETCH_AND_16:
8004 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8005 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8006 ignore, BUILT_IN_NONE);
8007 if (target)
8008 return target;
8009 break;
8011 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8012 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8013 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8014 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8015 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8016 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8017 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8018 ignore, BUILT_IN_NONE);
8019 if (target)
8020 return target;
8021 break;
8023 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8024 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8025 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8026 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8027 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8028 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8029 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8030 ignore, BUILT_IN_NONE);
8031 if (target)
8032 return target;
8033 break;
8035 case BUILT_IN_ATOMIC_FETCH_OR_1:
8036 case BUILT_IN_ATOMIC_FETCH_OR_2:
8037 case BUILT_IN_ATOMIC_FETCH_OR_4:
8038 case BUILT_IN_ATOMIC_FETCH_OR_8:
8039 case BUILT_IN_ATOMIC_FETCH_OR_16:
8040 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8041 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8042 ignore, BUILT_IN_NONE);
8043 if (target)
8044 return target;
8045 break;
8047 case BUILT_IN_ATOMIC_TEST_AND_SET:
8048 return expand_builtin_atomic_test_and_set (exp, target);
8050 case BUILT_IN_ATOMIC_CLEAR:
8051 return expand_builtin_atomic_clear (exp);
8053 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8054 return expand_builtin_atomic_always_lock_free (exp);
8056 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8057 target = expand_builtin_atomic_is_lock_free (exp);
8058 if (target)
8059 return target;
8060 break;
8062 case BUILT_IN_ATOMIC_THREAD_FENCE:
8063 expand_builtin_atomic_thread_fence (exp);
8064 return const0_rtx;
8066 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8067 expand_builtin_atomic_signal_fence (exp);
8068 return const0_rtx;
8070 case BUILT_IN_OBJECT_SIZE:
8071 return expand_builtin_object_size (exp);
8073 case BUILT_IN_MEMCPY_CHK:
8074 case BUILT_IN_MEMPCPY_CHK:
8075 case BUILT_IN_MEMMOVE_CHK:
8076 case BUILT_IN_MEMSET_CHK:
8077 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8078 if (target)
8079 return target;
8080 break;
8082 case BUILT_IN_STRCPY_CHK:
8083 case BUILT_IN_STPCPY_CHK:
8084 case BUILT_IN_STRNCPY_CHK:
8085 case BUILT_IN_STPNCPY_CHK:
8086 case BUILT_IN_STRCAT_CHK:
8087 case BUILT_IN_STRNCAT_CHK:
8088 case BUILT_IN_SNPRINTF_CHK:
8089 case BUILT_IN_VSNPRINTF_CHK:
8090 maybe_emit_chk_warning (exp, fcode);
8091 break;
8093 case BUILT_IN_SPRINTF_CHK:
8094 case BUILT_IN_VSPRINTF_CHK:
8095 maybe_emit_sprintf_chk_warning (exp, fcode);
8096 break;
8098 case BUILT_IN_FREE:
8099 if (warn_free_nonheap_object)
8100 maybe_emit_free_warning (exp);
8101 break;
8103 case BUILT_IN_THREAD_POINTER:
8104 return expand_builtin_thread_pointer (exp, target);
8106 case BUILT_IN_SET_THREAD_POINTER:
8107 expand_builtin_set_thread_pointer (exp);
8108 return const0_rtx;
8110 case BUILT_IN_ACC_ON_DEVICE:
8111 /* Do a library call if we failed to expand the builtin when
8112 folding. */
8113 break;
8115 case BUILT_IN_GOACC_PARLEVEL_ID:
8116 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8117 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8119 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8120 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8122 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8123 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8124 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8125 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8126 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8127 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8128 return expand_speculation_safe_value (mode, exp, target, ignore);
8130 default: /* Just do a library call if the builtin is unknown. */
8131 break;
8134 /* The switch statement above can drop through to cause the function
8135 to be called normally. */
8136 return expand_call (exp, target, ignore);
8139 /* Determine whether a tree node represents a call to a built-in
8140 function. If the tree T is a call to a built-in function with
8141 the right number of arguments of the appropriate types, return
8142 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8143 Otherwise the return value is END_BUILTINS. */
8145 enum built_in_function
8146 builtin_mathfn_code (const_tree t)
8148 const_tree fndecl, arg, parmlist;
8149 const_tree argtype, parmtype;
8150 const_call_expr_arg_iterator iter;
8152 if (TREE_CODE (t) != CALL_EXPR)
8153 return END_BUILTINS;
8155 fndecl = get_callee_fndecl (t);
8156 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8157 return END_BUILTINS;
8159 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8160 init_const_call_expr_arg_iterator (t, &iter);
8161 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8163 /* If a function doesn't take a variable number of arguments,
8164 the last element in the list will have type `void'. */
8165 parmtype = TREE_VALUE (parmlist);
8166 if (VOID_TYPE_P (parmtype))
8168 if (more_const_call_expr_args_p (&iter))
8169 return END_BUILTINS;
8170 return DECL_FUNCTION_CODE (fndecl);
8173 if (! more_const_call_expr_args_p (&iter))
8174 return END_BUILTINS;
8176 arg = next_const_call_expr_arg (&iter);
8177 argtype = TREE_TYPE (arg);
8179 if (SCALAR_FLOAT_TYPE_P (parmtype))
8181 if (! SCALAR_FLOAT_TYPE_P (argtype))
8182 return END_BUILTINS;
8184 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8186 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8187 return END_BUILTINS;
8189 else if (POINTER_TYPE_P (parmtype))
8191 if (! POINTER_TYPE_P (argtype))
8192 return END_BUILTINS;
8194 else if (INTEGRAL_TYPE_P (parmtype))
8196 if (! INTEGRAL_TYPE_P (argtype))
8197 return END_BUILTINS;
8199 else
8200 return END_BUILTINS;
8203 /* Variable-length argument list. */
8204 return DECL_FUNCTION_CODE (fndecl);
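/* For example, a well-typed call sqrt (x) with double X yields
   BUILT_IN_SQRT here, while a call whose arguments fail the
   type-class checks above (say, a pointer passed where a double is
   expected via a stray redeclaration) yields END_BUILTINS.  */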
8207 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8208 evaluate to a constant. */
8210 static tree
8211 fold_builtin_constant_p (tree arg)
8213 /* We return 1 for a numeric type that's known to be a constant
8214 value at compile-time or for an aggregate type that's a
8215 literal constant. */
8216 STRIP_NOPS (arg);
8218 /* If we know this is a constant, return the constant one. */
8219 if (CONSTANT_CLASS_P (arg)
8220 || (TREE_CODE (arg) == CONSTRUCTOR
8221 && TREE_CONSTANT (arg)))
8222 return integer_one_node;
8223 if (TREE_CODE (arg) == ADDR_EXPR)
8225 tree op = TREE_OPERAND (arg, 0);
8226 if (TREE_CODE (op) == STRING_CST
8227 || (TREE_CODE (op) == ARRAY_REF
8228 && integer_zerop (TREE_OPERAND (op, 1))
8229 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8230 return integer_one_node;
8233 /* If this expression has side effects, show we don't know it to be a
8234 constant. Likewise if it's a pointer or aggregate type, since in
8235 those cases we only want literals; those are only optimized
8236 when generating RTL, not later.
8237 And finally, if we are compiling an initializer, not code, we
8238 need to return a definite result now; there's not going to be any
8239 more optimization done. */
8240 if (TREE_SIDE_EFFECTS (arg)
8241 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8242 || POINTER_TYPE_P (TREE_TYPE (arg))
8243 || cfun == 0
8244 || folding_initializer
8245 || force_folding_builtin_constant_p)
8246 return integer_zero_node;
8248 return NULL_TREE;
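/* Illustrative outcomes of the folding above:

     __builtin_constant_p (3)        -> 1 (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")    -> 1 (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (some_ptr) -> 0 (pointer type, see above)
     __builtin_constant_p (x + 1)    -> NULL_TREE, i.e. deferred to
                                        later folding, except when
                                        compiling an initializer.  */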
8251 /* Create builtin_expect or builtin_expect_with_probability
8252 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8253 The Fortran FE can also produce builtin_expect with PREDICTOR as the
8254 third argument; builtin_expect_with_probability instead uses the third
8255 argument as a PROBABILITY value. */
8257 static tree
8258 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8259 tree predictor, tree probability)
8261 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8263 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8264 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8265 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8266 ret_type = TREE_TYPE (TREE_TYPE (fn));
8267 pred_type = TREE_VALUE (arg_types);
8268 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8270 pred = fold_convert_loc (loc, pred_type, pred);
8271 expected = fold_convert_loc (loc, expected_type, expected);
8273 if (probability)
8274 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8275 else
8276 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8277 predictor);
8279 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8280 build_int_cst (ret_type, 0));
8283 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8284 NULL_TREE if no simplification is possible. */
8286 tree
8287 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8288 tree arg3)
8290 tree inner, fndecl, inner_arg0;
8291 enum tree_code code;
8293 /* Distribute the expected value over short-circuiting operators.
8294 See through the cast from truthvalue_type_node to long. */
8295 inner_arg0 = arg0;
8296 while (CONVERT_EXPR_P (inner_arg0)
8297 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8298 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8299 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8301 /* If this is a builtin_expect within a builtin_expect, keep the
8302 inner one. See through a comparison against a constant. It
8303 might have been added to create a truthvalue. */
8304 inner = inner_arg0;
8306 if (COMPARISON_CLASS_P (inner)
8307 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8308 inner = TREE_OPERAND (inner, 0);
8310 if (TREE_CODE (inner) == CALL_EXPR
8311 && (fndecl = get_callee_fndecl (inner))
8312 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8313 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8314 return arg0;
8316 inner = inner_arg0;
8317 code = TREE_CODE (inner);
8318 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8320 tree op0 = TREE_OPERAND (inner, 0);
8321 tree op1 = TREE_OPERAND (inner, 1);
8322 arg1 = save_expr (arg1);
8324 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8325 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8326 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8328 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8331 /* If the argument isn't invariant then there's nothing else we can do. */
8332 if (!TREE_CONSTANT (inner_arg0))
8333 return NULL_TREE;
8335 /* If we expect that a comparison against the argument will fold to
8336 a constant, return the constant. In practice, this means a true
8337 constant or the address of a non-weak symbol. */
8338 inner = inner_arg0;
8339 STRIP_NOPS (inner);
8340 if (TREE_CODE (inner) == ADDR_EXPR)
8344 inner = TREE_OPERAND (inner, 0);
8346 while (TREE_CODE (inner) == COMPONENT_REF
8347 || TREE_CODE (inner) == ARRAY_REF);
8348 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8349 return NULL_TREE;
8352 /* Otherwise, ARG0 already has the proper type for the return value. */
8353 return arg0;
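/* Illustrative example of the distribution above:

     __builtin_expect (a && b, 1)

   becomes roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each arm of the short-circuit carries its own prediction.  */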
8356 /* Fold a call to __builtin_classify_type with argument ARG. */
8358 static tree
8359 fold_builtin_classify_type (tree arg)
8361 if (arg == 0)
8362 return build_int_cst (integer_type_node, no_type_class);
8364 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8367 /* Fold a call to __builtin_strlen with argument ARG. */
8369 static tree
8370 fold_builtin_strlen (location_t loc, tree type, tree arg)
8372 if (!validate_arg (arg, POINTER_TYPE))
8373 return NULL_TREE;
8374 else
8376 tree len = c_strlen (arg, 0);
8378 if (len)
8379 return fold_convert_loc (loc, type, len);
8381 return NULL_TREE;
8385 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8387 static tree
8388 fold_builtin_inf (location_t loc, tree type, int warn)
8390 REAL_VALUE_TYPE real;
8392 /* __builtin_inff is intended to be usable to define INFINITY on all
8393 targets. If an infinity is not available, INFINITY expands "to a
8394 positive constant of type float that overflows at translation
8395 time", footnote "In this case, using INFINITY will violate the
8396 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8397 Thus we pedwarn to ensure this constraint violation is
8398 diagnosed. */
8399 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8400 pedwarn (loc, 0, "target format does not support infinity");
8402 real_inf (&real);
8403 return build_real (type, real);
8406 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8407 NULL_TREE if no simplification can be made. */
8409 static tree
8410 fold_builtin_sincos (location_t loc,
8411 tree arg0, tree arg1, tree arg2)
8413 tree type;
8414 tree fndecl, call = NULL_TREE;
8416 if (!validate_arg (arg0, REAL_TYPE)
8417 || !validate_arg (arg1, POINTER_TYPE)
8418 || !validate_arg (arg2, POINTER_TYPE))
8419 return NULL_TREE;
8421 type = TREE_TYPE (arg0);
8423 /* Canonicalize sincos to cexpi: find the cexpi function for TYPE. */
8424 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8425 if (fn == END_BUILTINS)
8426 return NULL_TREE;
8428 /* Calculate the result when the argument is a constant. */
8429 if (TREE_CODE (arg0) == REAL_CST)
8431 tree complex_type = build_complex_type (type);
8432 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8434 if (!call)
8436 if (!targetm.libc_has_function (function_c99_math_complex)
8437 || !builtin_decl_implicit_p (fn))
8438 return NULL_TREE;
8439 fndecl = builtin_decl_explicit (fn);
8440 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8441 call = builtin_save_expr (call);
8444 tree ptype = build_pointer_type (type);
8445 arg1 = fold_convert (ptype, arg1);
8446 arg2 = fold_convert (ptype, arg2);
8447 return build2 (COMPOUND_EXPR, void_type_node,
8448 build2 (MODIFY_EXPR, void_type_node,
8449 build_fold_indirect_ref_loc (loc, arg1),
8450 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8451 build2 (MODIFY_EXPR, void_type_node,
8452 build_fold_indirect_ref_loc (loc, arg2),
8453 fold_build1_loc (loc, REALPART_EXPR, type, call)));
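/* Illustrative result of the rewrite above: sincos (x, sp, cp)
   becomes roughly

     tmp = cexpi (x);   (or a folded complex constant for constant X)
     *sp = __imag__ tmp, *cp = __real__ tmp;

   letting later passes share one cexpi among several sin/cos calls
   with the same argument.  */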
8456 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8457 Return NULL_TREE if no simplification can be made. */
8459 static tree
8460 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8462 if (!validate_arg (arg1, POINTER_TYPE)
8463 || !validate_arg (arg2, POINTER_TYPE)
8464 || !validate_arg (len, INTEGER_TYPE))
8465 return NULL_TREE;
8467 /* If the LEN parameter is zero, return zero. */
8468 if (integer_zerop (len))
8469 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8470 arg1, arg2);
8472 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8473 if (operand_equal_p (arg1, arg2, 0))
8474 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8476 /* If the LEN parameter is one, return an expression corresponding to
8477 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8478 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8480 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8481 tree cst_uchar_ptr_node
8482 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8484 tree ind1
8485 = fold_convert_loc (loc, integer_type_node,
8486 build1 (INDIRECT_REF, cst_uchar_node,
8487 fold_convert_loc (loc,
8488 cst_uchar_ptr_node,
8489 arg1)));
8490 tree ind2
8491 = fold_convert_loc (loc, integer_type_node,
8492 build1 (INDIRECT_REF, cst_uchar_node,
8493 fold_convert_loc (loc,
8494 cst_uchar_ptr_node,
8495 arg2)));
8496 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8499 return NULL_TREE;
8502 /* Fold a call to builtin isascii with argument ARG. */
8504 static tree
8505 fold_builtin_isascii (location_t loc, tree arg)
8507 if (!validate_arg (arg, INTEGER_TYPE))
8508 return NULL_TREE;
8509 else
8511 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8512 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8513 build_int_cst (integer_type_node,
8514 ~ (unsigned HOST_WIDE_INT) 0x7f));
8515 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8516 arg, integer_zero_node);
8520 /* Fold a call to builtin toascii with argument ARG. */
8522 static tree
8523 fold_builtin_toascii (location_t loc, tree arg)
8525 if (!validate_arg (arg, INTEGER_TYPE))
8526 return NULL_TREE;
8528 /* Transform toascii(c) -> (c & 0x7f). */
8529 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8530 build_int_cst (integer_type_node, 0x7f));
8533 /* Fold a call to builtin isdigit with argument ARG. */
8535 static tree
8536 fold_builtin_isdigit (location_t loc, tree arg)
8538 if (!validate_arg (arg, INTEGER_TYPE))
8539 return NULL_TREE;
8540 else
8542 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8543 /* According to the C standard, isdigit is unaffected by locale.
8544 However, it definitely is affected by the target character set. */
8545 unsigned HOST_WIDE_INT target_digit0
8546 = lang_hooks.to_target_charset ('0');
8548 if (target_digit0 == 0)
8549 return NULL_TREE;
8551 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8552 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8553 build_int_cst (unsigned_type_node, target_digit0));
8554 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8555 build_int_cst (unsigned_type_node, 9));
8559 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8561 static tree
8562 fold_builtin_fabs (location_t loc, tree arg, tree type)
8564 if (!validate_arg (arg, REAL_TYPE))
8565 return NULL_TREE;
8567 arg = fold_convert_loc (loc, type, arg);
8568 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8571 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8573 static tree
8574 fold_builtin_abs (location_t loc, tree arg, tree type)
8576 if (!validate_arg (arg, INTEGER_TYPE))
8577 return NULL_TREE;
8579 arg = fold_convert_loc (loc, type, arg);
8580 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8583 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8585 static tree
8586 fold_builtin_carg (location_t loc, tree arg, tree type)
8588 if (validate_arg (arg, COMPLEX_TYPE)
8589 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8591 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8593 if (atan2_fn)
8595 tree new_arg = builtin_save_expr (arg);
8596 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8597 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8598 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8602 return NULL_TREE;
8605 /* Fold a call to builtin frexp; we can assume the base is 2. */
8607 static tree
8608 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8610 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8611 return NULL_TREE;
8613 STRIP_NOPS (arg0);
8615 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8616 return NULL_TREE;
8618 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8620 /* Proceed if a valid pointer type was passed in. */
8621 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8623 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8624 tree frac, exp;
8626 switch (value->cl)
8628 case rvc_zero:
8629 /* For +-0, return (*exp = 0, +-0). */
8630 exp = integer_zero_node;
8631 frac = arg0;
8632 break;
8633 case rvc_nan:
8634 case rvc_inf:
8635 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8636 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8637 case rvc_normal:
8639 /* Since the frexp function always expects base 2, and in
8640 GCC normalized significands are already in the range
8641 [0.5, 1.0), we have exactly what frexp wants. */
8642 REAL_VALUE_TYPE frac_rvt = *value;
8643 SET_REAL_EXP (&frac_rvt, 0);
8644 frac = build_real (rettype, frac_rvt);
8645 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8647 break;
8648 default:
8649 gcc_unreachable ();
8652 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8653 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8654 TREE_SIDE_EFFECTS (arg1) = 1;
8655 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8658 return NULL_TREE;
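/* Illustrative constant folds produced above:

     frexp (4.0, &e) -> (*e = 3, 0.5)    since 4.0 == 0.5 * 2**3
     frexp (0.0, &e) -> (*e = 0, 0.0)
     frexp (Inf, &e) -> Inf, *e unspecified  */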
8661 /* Fold a call to builtin modf. */
8663 static tree
8664 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8666 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8667 return NULL_TREE;
8669 STRIP_NOPS (arg0);
8671 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8672 return NULL_TREE;
8674 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8676 /* Proceed if a valid pointer type was passed in. */
8677 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8679 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8680 REAL_VALUE_TYPE trunc, frac;
8682 switch (value->cl)
8684 case rvc_nan:
8685 case rvc_zero:
8686 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8687 trunc = frac = *value;
8688 break;
8689 case rvc_inf:
8690 /* For +-Inf, return (*arg1 = arg0, +-0). */
8691 frac = dconst0;
8692 frac.sign = value->sign;
8693 trunc = *value;
8694 break;
8695 case rvc_normal:
8696 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8697 real_trunc (&trunc, VOIDmode, value);
8698 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8699 /* If the original number was negative and already
8700 integral, then the fractional part is -0.0. */
8701 if (value->sign && frac.cl == rvc_zero)
8702 frac.sign = value->sign;
8703 break;
8706 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8707 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8708 build_real (rettype, trunc));
8709 TREE_SIDE_EFFECTS (arg1) = 1;
8710 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8711 build_real (rettype, frac));
8714 return NULL_TREE;
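/* Illustrative constant folds produced above:

     modf (3.25, &i) -> (*i = 3.0, 0.25)
     modf (-2.0, &i) -> (*i = -2.0, -0.0)  (negative integral case)
     modf (+Inf, &i) -> (*i = +Inf, +0.0)  (frac copies the sign)  */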
8717 /* Given a location LOC, an interclass builtin function decl FNDECL
8718 and its single argument ARG, return a folded expression computing
8719 the same, or NULL_TREE if we either couldn't or didn't want to fold
8720 (the latter happens if there's an RTL instruction available). */
8722 static tree
8723 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8725 machine_mode mode;
8727 if (!validate_arg (arg, REAL_TYPE))
8728 return NULL_TREE;
8730 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8731 return NULL_TREE;
8733 mode = TYPE_MODE (TREE_TYPE (arg));
8735 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8737 /* If there is no optab, try generic code. */
8738 switch (DECL_FUNCTION_CODE (fndecl))
8740 tree result;
8742 CASE_FLT_FN (BUILT_IN_ISINF):
8744 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8745 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8746 tree type = TREE_TYPE (arg);
8747 REAL_VALUE_TYPE r;
8748 char buf[128];
8750 if (is_ibm_extended)
8752 /* NaN and Inf are encoded in the high-order double value
8753 only. The low-order value is not significant. */
8754 type = double_type_node;
8755 mode = DFmode;
8756 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8758 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8759 real_from_string (&r, buf);
8760 result = build_call_expr (isgr_fn, 2,
8761 fold_build1_loc (loc, ABS_EXPR, type, arg),
8762 build_real (type, r));
8763 return result;
8765 CASE_FLT_FN (BUILT_IN_FINITE):
8766 case BUILT_IN_ISFINITE:
8768 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8769 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8770 tree type = TREE_TYPE (arg);
8771 REAL_VALUE_TYPE r;
8772 char buf[128];
8774 if (is_ibm_extended)
8776 /* NaN and Inf are encoded in the high-order double value
8777 only. The low-order value is not significant. */
8778 type = double_type_node;
8779 mode = DFmode;
8780 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8782 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8783 real_from_string (&r, buf);
8784 result = build_call_expr (isle_fn, 2,
8785 fold_build1_loc (loc, ABS_EXPR, type, arg),
8786 build_real (type, r));
8787 /*result = fold_build2_loc (loc, UNGT_EXPR,
8788 TREE_TYPE (TREE_TYPE (fndecl)),
8789 fold_build1_loc (loc, ABS_EXPR, type, arg),
8790 build_real (type, r));
8791 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8792 TREE_TYPE (TREE_TYPE (fndecl)),
8793 result);*/
8794 return result;
8796 case BUILT_IN_ISNORMAL:
8798 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8799 islessequal(fabs(x),DBL_MAX). */
8800 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8801 tree type = TREE_TYPE (arg);
8802 tree orig_arg, max_exp, min_exp;
8803 machine_mode orig_mode = mode;
8804 REAL_VALUE_TYPE rmax, rmin;
8805 char buf[128];
8807 orig_arg = arg = builtin_save_expr (arg);
8808 if (is_ibm_extended)
8810 /* Use double to test the normal range of IBM extended
8811 precision. Emin for IBM extended precision is
8812 different to emin for IEEE double, being 53 higher
8813 since the low double exponent is at least 53 lower
8814 than the high double exponent. */
8815 type = double_type_node;
8816 mode = DFmode;
8817 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8819 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8821 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8822 real_from_string (&rmax, buf);
8823 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8824 real_from_string (&rmin, buf);
8825 max_exp = build_real (type, rmax);
8826 min_exp = build_real (type, rmin);
8828 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8829 if (is_ibm_extended)
8831 /* Testing the high end of the range is done just using
8832 the high double, using the same test as isfinite().
8833 For the subnormal end of the range we first test the
8834 high double, then if its magnitude is equal to the
8835 limit of 0x1p-969, we test whether the low double is
8836 non-zero and opposite sign to the high double. */
8837 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8838 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8839 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8840 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8841 arg, min_exp);
8842 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8843 complex_double_type_node, orig_arg);
8844 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8845 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8846 tree zero = build_real (type, dconst0);
8847 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8848 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8849 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8850 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8851 fold_build3 (COND_EXPR,
8852 integer_type_node,
8853 hilt, logt, lolt));
8854 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8855 eq_min, ok_lo);
8856 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8857 gt_min, eq_min);
8859 else
8861 tree const isge_fn
8862 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8863 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8865 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8866 max_exp, min_exp);
8867 return result;
8869 default:
8870 break;
8873 return NULL_TREE;
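/* Illustrative sketch, not part of builtins.c: the user-level
   equivalences the interclass folds above implement for IEEE double
   (the demo_* names are hypothetical).  */
#include <float.h>
#include <math.h>

static int
demo_isinf_fold (double x)
{
  /* isinf(x) -> isgreater(fabs(x), DBL_MAX).  */
  return isgreater (fabs (x), DBL_MAX);
}

static int
demo_isfinite_fold (double x)
{
  /* isfinite(x) -> islessequal(fabs(x), DBL_MAX).  */
  return islessequal (fabs (x), DBL_MAX);
}

static int
demo_isnormal_fold (double x)
{
  /* isnormal(x) -> isgreaterequal(fabs(x), DBL_MIN)
		    & islessequal(fabs(x), DBL_MAX).  */
  return isgreaterequal (fabs (x), DBL_MIN) & islessequal (fabs (x), DBL_MAX);
}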
8876 /* Fold a call to __builtin_isnan, __builtin_isinf, __builtin_isinf_sign
8877 or __builtin_isfinite. ARG is the argument for the call. */
8879 static tree
8880 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8882 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8884 if (!validate_arg (arg, REAL_TYPE))
8885 return NULL_TREE;
8887 switch (builtin_index)
8889 case BUILT_IN_ISINF:
8890 if (!HONOR_INFINITIES (arg))
8891 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8893 return NULL_TREE;
8895 case BUILT_IN_ISINF_SIGN:
8897 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8898 /* In a boolean context, GCC will fold the inner COND_EXPR to
8899 1. So e.g. "if (isinf_sign(x))" would be folded to just
8900 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8901 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8902 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8903 tree tmp = NULL_TREE;
8905 arg = builtin_save_expr (arg);
8907 if (signbit_fn && isinf_fn)
8909 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8910 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8912 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8913 signbit_call, integer_zero_node);
8914 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8915 isinf_call, integer_zero_node);
8917 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8918 integer_minus_one_node, integer_one_node);
8919 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8920 isinf_call, tmp,
8921 integer_zero_node);
8924 return tmp;
8927 case BUILT_IN_ISFINITE:
8928 if (!HONOR_NANS (arg)
8929 && !HONOR_INFINITIES (arg))
8930 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8932 return NULL_TREE;
8934 case BUILT_IN_ISNAN:
8935 if (!HONOR_NANS (arg))
8936 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8939 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8940 if (is_ibm_extended)
8942 /* NaN and Inf are encoded in the high-order double value
8943 only. The low-order value is not significant. */
8944 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8947 arg = builtin_save_expr (arg);
8948 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8950 default:
8951 gcc_unreachable ();
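/* Illustrative sketch, not part of this file (demo_isinf_sign is
   hypothetical): the isinf_sign fold above at the source level.  */
#include <math.h>

static int
demo_isinf_sign (double x)
{
  /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0.  */
  return isinf (x) ? (signbit (x) ? -1 : 1) : 0;
}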
8955 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8956 This builtin will generate code to return the appropriate floating
8957 point classification depending on the value of the floating point
8958 number passed in. The possible return values must be supplied as
8959 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8960 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8961 one floating-point argument, which is "type generic". */
8963 static tree
8964 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8966 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8967 arg, type, res, tmp;
8968 machine_mode mode;
8969 REAL_VALUE_TYPE r;
8970 char buf[128];
8972 /* Verify the required arguments in the original call. */
8973 if (nargs != 6
8974 || !validate_arg (args[0], INTEGER_TYPE)
8975 || !validate_arg (args[1], INTEGER_TYPE)
8976 || !validate_arg (args[2], INTEGER_TYPE)
8977 || !validate_arg (args[3], INTEGER_TYPE)
8978 || !validate_arg (args[4], INTEGER_TYPE)
8979 || !validate_arg (args[5], REAL_TYPE))
8980 return NULL_TREE;
8982 fp_nan = args[0];
8983 fp_infinite = args[1];
8984 fp_normal = args[2];
8985 fp_subnormal = args[3];
8986 fp_zero = args[4];
8987 arg = args[5];
8988 type = TREE_TYPE (arg);
8989 mode = TYPE_MODE (type);
8990 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8992 /* fpclassify(x) ->
8993 isnan(x) ? FP_NAN :
8994 (fabs(x) == Inf ? FP_INFINITE :
8995 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8996 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8998 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8999 build_real (type, dconst0));
9000 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9001 tmp, fp_zero, fp_subnormal);
9003 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9004 real_from_string (&r, buf);
9005 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9006 arg, build_real (type, r));
9007 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9009 if (HONOR_INFINITIES (mode))
9011 real_inf (&r);
9012 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9013 build_real (type, r));
9014 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9015 fp_infinite, res);
9018 if (HONOR_NANS (mode))
9020 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9021 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9024 return res;
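/* Illustrative sketch, not part of this file: the COND_EXPR chain
   built by fold_builtin_fpclassify, written as plain C for IEEE
   double (demo_fpclassify is hypothetical; for double the 0x1p%d
   string above produces 0x1p-1022, i.e. DBL_MIN).  */
#include <float.h>
#include <math.h>

static int
demo_fpclassify (int fp_nan, int fp_infinite, int fp_normal,
		 int fp_subnormal, int fp_zero, double x)
{
  double a = fabs (x);
  int res = (a == 0.0) ? fp_zero : fp_subnormal;
  res = (a >= DBL_MIN) ? fp_normal : res;
  res = (a == (double) INFINITY) ? fp_infinite : res;
  return (x == x) ? res : fp_nan;	/* The ORDERED_EXPR test.  */
}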
9027 /* Fold a call to an unordered comparison function such as
9028 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9029 being called and ARG0 and ARG1 are the arguments for the call.
9030 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9031 the opposite of the desired result. UNORDERED_CODE is used
9032 for modes that can hold NaNs and ORDERED_CODE is used for
9033 the rest. */
9035 static tree
9036 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9037 enum tree_code unordered_code,
9038 enum tree_code ordered_code)
9040 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9041 enum tree_code code;
9042 tree type0, type1;
9043 enum tree_code code0, code1;
9044 tree cmp_type = NULL_TREE;
9046 type0 = TREE_TYPE (arg0);
9047 type1 = TREE_TYPE (arg1);
9049 code0 = TREE_CODE (type0);
9050 code1 = TREE_CODE (type1);
9052 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9053 /* Choose the wider of two real types. */
9054 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9055 ? type0 : type1;
9056 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9057 cmp_type = type0;
9058 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9059 cmp_type = type1;
9061 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9062 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9064 if (unordered_code == UNORDERED_EXPR)
9066 if (!HONOR_NANS (arg0))
9067 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9068 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9071 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9072 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9073 fold_build2_loc (loc, code, type, arg0, arg1));
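/* Illustrative, not part of this file: with NaNs honored,
   isgreater(x, y) folds to the negation of UNLE_EXPR ("unordered or
   less-equal"), preserving its quiet, non-trapping semantics; with
   -ffinite-math-only the plain ordered LE_EXPR is negated instead.
   demo_isgreater_fold is hypothetical.  */
#include <math.h>

static int
demo_isgreater_fold (double x, double y)
{
  /* !(UNLE_EXPR (x, y)) == isgreater (x, y).  */
  return !(isunordered (x, y) || islessequal (x, y));
}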
9076 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9077 arithmetics if it can never overflow, or into internal functions that
9078 return both result of arithmetics and overflowed boolean flag in
9079 a complex integer result, or some other check for overflow.
9080 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9081 checking part of that. */
9083 static tree
9084 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9085 tree arg0, tree arg1, tree arg2)
9087 enum internal_fn ifn = IFN_LAST;
9088 /* The code of the expression corresponding to the type-generic
9089 built-in, or ERROR_MARK for the type-specific ones. */
9090 enum tree_code opcode = ERROR_MARK;
9091 bool ovf_only = false;
9093 switch (fcode)
9095 case BUILT_IN_ADD_OVERFLOW_P:
9096 ovf_only = true;
9097 /* FALLTHRU */
9098 case BUILT_IN_ADD_OVERFLOW:
9099 opcode = PLUS_EXPR;
9100 /* FALLTHRU */
9101 case BUILT_IN_SADD_OVERFLOW:
9102 case BUILT_IN_SADDL_OVERFLOW:
9103 case BUILT_IN_SADDLL_OVERFLOW:
9104 case BUILT_IN_UADD_OVERFLOW:
9105 case BUILT_IN_UADDL_OVERFLOW:
9106 case BUILT_IN_UADDLL_OVERFLOW:
9107 ifn = IFN_ADD_OVERFLOW;
9108 break;
9109 case BUILT_IN_SUB_OVERFLOW_P:
9110 ovf_only = true;
9111 /* FALLTHRU */
9112 case BUILT_IN_SUB_OVERFLOW:
9113 opcode = MINUS_EXPR;
9114 /* FALLTHRU */
9115 case BUILT_IN_SSUB_OVERFLOW:
9116 case BUILT_IN_SSUBL_OVERFLOW:
9117 case BUILT_IN_SSUBLL_OVERFLOW:
9118 case BUILT_IN_USUB_OVERFLOW:
9119 case BUILT_IN_USUBL_OVERFLOW:
9120 case BUILT_IN_USUBLL_OVERFLOW:
9121 ifn = IFN_SUB_OVERFLOW;
9122 break;
9123 case BUILT_IN_MUL_OVERFLOW_P:
9124 ovf_only = true;
9125 /* FALLTHRU */
9126 case BUILT_IN_MUL_OVERFLOW:
9127 opcode = MULT_EXPR;
9128 /* FALLTHRU */
9129 case BUILT_IN_SMUL_OVERFLOW:
9130 case BUILT_IN_SMULL_OVERFLOW:
9131 case BUILT_IN_SMULLL_OVERFLOW:
9132 case BUILT_IN_UMUL_OVERFLOW:
9133 case BUILT_IN_UMULL_OVERFLOW:
9134 case BUILT_IN_UMULLL_OVERFLOW:
9135 ifn = IFN_MUL_OVERFLOW;
9136 break;
9137 default:
9138 gcc_unreachable ();
9141 /* For the "generic" overloads, the first two arguments can have different
9142 types and the last argument determines the target type to use to check
9143 for overflow. The arguments of the other overloads all have the same
9144 type. */
9145 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9147 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9148 arguments are constant, attempt to fold the built-in call into a constant
9149 expression indicating whether or not it detected an overflow. */
9150 if (ovf_only
9151 && TREE_CODE (arg0) == INTEGER_CST
9152 && TREE_CODE (arg1) == INTEGER_CST)
9153 /* Perform the computation in the target type and check for overflow. */
9154 return omit_one_operand_loc (loc, boolean_type_node,
9155 arith_overflowed_p (opcode, type, arg0, arg1)
9156 ? boolean_true_node : boolean_false_node,
9157 arg2);
9159 tree ctype = build_complex_type (type);
9160 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9161 2, arg0, arg1);
9162 tree tgt = save_expr (call);
9163 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9164 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9165 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9167 if (ovf_only)
9168 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9170 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9171 tree store
9172 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9173 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
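/* Illustrative usage, not part of this file: the type-generic
   overflow builtins folded above, at the source level (the demo_*
   names are hypothetical).  */
#include <stdbool.h>

static bool
demo_checked_add (int a, int b, int *out)
{
  /* Becomes IFN_ADD_OVERFLOW: REALPART_EXPR of the complex result
     is the sum, IMAGPART_EXPR the overflow flag, as built above.  */
  return __builtin_add_overflow (a, b, out);
}

static bool
demo_add_would_overflow (long a, long b)
{
  /* The _overflow_p variant computes only the flag; the third
     argument merely supplies the type checked against.  */
  return __builtin_add_overflow_p (a, b, (int) 0);
}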
9176 /* Fold a call to __builtin_FILE to a constant string. */
9178 static inline tree
9179 fold_builtin_FILE (location_t loc)
9181 if (const char *fname = LOCATION_FILE (loc))
9183 /* The documentation says this builtin is equivalent to the preprocessor
9184 __FILE__ macro, so it appears appropriate to use the same file prefix
9185 mappings. */
9186 fname = remap_macro_filename (fname);
9187 return build_string_literal (strlen (fname) + 1, fname);
9190 return build_string_literal (1, "");
9193 /* Fold a call to __builtin_FUNCTION to a constant string. */
9195 static inline tree
9196 fold_builtin_FUNCTION ()
9198 const char *name = "";
9200 if (current_function_decl)
9201 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9203 return build_string_literal (strlen (name) + 1, name);
9206 /* Fold a call to __builtin_LINE to an integer constant. */
9208 static inline tree
9209 fold_builtin_LINE (location_t loc, tree type)
9211 return build_int_cst (type, LOCATION_LINE (loc));
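/* Illustrative, not part of this file: the three folds above at the
   source level; each call becomes a constant at its call site
   (demo_source_location is hypothetical).  */
static void
demo_source_location (void)
{
  const char *file = __builtin_FILE ();		/* a string literal */
  const char *func = __builtin_FUNCTION ();	/* "demo_source_location" */
  int line = __builtin_LINE ();			/* an integer constant */
  (void) file; (void) func; (void) line;
}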
9214 /* Fold a call to built-in function FNDECL with 0 arguments.
9215 This function returns NULL_TREE if no simplification was possible. */
9217 static tree
9218 fold_builtin_0 (location_t loc, tree fndecl)
9220 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9221 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9222 switch (fcode)
9224 case BUILT_IN_FILE:
9225 return fold_builtin_FILE (loc);
9227 case BUILT_IN_FUNCTION:
9228 return fold_builtin_FUNCTION ();
9230 case BUILT_IN_LINE:
9231 return fold_builtin_LINE (loc, type);
9233 CASE_FLT_FN (BUILT_IN_INF):
9234 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9235 case BUILT_IN_INFD32:
9236 case BUILT_IN_INFD64:
9237 case BUILT_IN_INFD128:
9238 return fold_builtin_inf (loc, type, true);
9240 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9241 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9242 return fold_builtin_inf (loc, type, false);
9244 case BUILT_IN_CLASSIFY_TYPE:
9245 return fold_builtin_classify_type (NULL_TREE);
9247 default:
9248 break;
9250 return NULL_TREE;
9253 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9254 This function returns NULL_TREE if no simplification was possible. */
9256 static tree
9257 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9259 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9260 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9262 if (TREE_CODE (arg0) == ERROR_MARK)
9263 return NULL_TREE;
9265 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9266 return ret;
9268 switch (fcode)
9270 case BUILT_IN_CONSTANT_P:
9272 tree val = fold_builtin_constant_p (arg0);
9274 /* Gimplification will pull the CALL_EXPR for the builtin out of
9275 an if condition. When not optimizing, we'll not CSE it back.
9276 To avoid regressions such as link errors, return false now. */
9277 if (!val && !optimize)
9278 val = integer_zero_node;
9280 return val;
9283 case BUILT_IN_CLASSIFY_TYPE:
9284 return fold_builtin_classify_type (arg0);
9286 case BUILT_IN_STRLEN:
9287 return fold_builtin_strlen (loc, type, arg0);
9289 CASE_FLT_FN (BUILT_IN_FABS):
9290 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9291 case BUILT_IN_FABSD32:
9292 case BUILT_IN_FABSD64:
9293 case BUILT_IN_FABSD128:
9294 return fold_builtin_fabs (loc, arg0, type);
9296 case BUILT_IN_ABS:
9297 case BUILT_IN_LABS:
9298 case BUILT_IN_LLABS:
9299 case BUILT_IN_IMAXABS:
9300 return fold_builtin_abs (loc, arg0, type);
9302 CASE_FLT_FN (BUILT_IN_CONJ):
9303 if (validate_arg (arg0, COMPLEX_TYPE)
9304 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9305 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9306 break;
9308 CASE_FLT_FN (BUILT_IN_CREAL):
9309 if (validate_arg (arg0, COMPLEX_TYPE)
9310 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9311 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9312 break;
9314 CASE_FLT_FN (BUILT_IN_CIMAG):
9315 if (validate_arg (arg0, COMPLEX_TYPE)
9316 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9317 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9318 break;
9320 CASE_FLT_FN (BUILT_IN_CARG):
9321 return fold_builtin_carg (loc, arg0, type);
9323 case BUILT_IN_ISASCII:
9324 return fold_builtin_isascii (loc, arg0);
9326 case BUILT_IN_TOASCII:
9327 return fold_builtin_toascii (loc, arg0);
9329 case BUILT_IN_ISDIGIT:
9330 return fold_builtin_isdigit (loc, arg0);
9332 CASE_FLT_FN (BUILT_IN_FINITE):
9333 case BUILT_IN_FINITED32:
9334 case BUILT_IN_FINITED64:
9335 case BUILT_IN_FINITED128:
9336 case BUILT_IN_ISFINITE:
9338 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9339 if (ret)
9340 return ret;
9341 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9344 CASE_FLT_FN (BUILT_IN_ISINF):
9345 case BUILT_IN_ISINFD32:
9346 case BUILT_IN_ISINFD64:
9347 case BUILT_IN_ISINFD128:
9349 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9350 if (ret)
9351 return ret;
9352 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9355 case BUILT_IN_ISNORMAL:
9356 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9358 case BUILT_IN_ISINF_SIGN:
9359 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9361 CASE_FLT_FN (BUILT_IN_ISNAN):
9362 case BUILT_IN_ISNAND32:
9363 case BUILT_IN_ISNAND64:
9364 case BUILT_IN_ISNAND128:
9365 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9367 case BUILT_IN_FREE:
9368 if (integer_zerop (arg0))
9369 return build_empty_stmt (loc);
9370 break;
9372 default:
9373 break;
9376 return NULL_TREE;
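/* Illustrative, not part of this file: the BUILT_IN_CONSTANT_P rule
   above at the source level (demo_constant_p is hypothetical).  */
static int
demo_constant_p (int x)
{
  /* At -O0 the undecided case folds to 0 immediately, so code
     guarded by it is removed and cannot cause e.g. link errors.  */
  return __builtin_constant_p (x) ? x : -1;
}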
9380 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9381 This function returns NULL_TREE if no simplification was possible. */
9383 static tree
9384 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9386 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9387 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9389 if (TREE_CODE (arg0) == ERROR_MARK
9390 || TREE_CODE (arg1) == ERROR_MARK)
9391 return NULL_TREE;
9393 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9394 return ret;
9396 switch (fcode)
9398 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9399 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9400 if (validate_arg (arg0, REAL_TYPE)
9401 && validate_arg (arg1, POINTER_TYPE))
9402 return do_mpfr_lgamma_r (arg0, arg1, type);
9403 break;
9405 CASE_FLT_FN (BUILT_IN_FREXP):
9406 return fold_builtin_frexp (loc, arg0, arg1, type);
9408 CASE_FLT_FN (BUILT_IN_MODF):
9409 return fold_builtin_modf (loc, arg0, arg1, type);
9411 case BUILT_IN_STRSPN:
9412 return fold_builtin_strspn (loc, arg0, arg1);
9414 case BUILT_IN_STRCSPN:
9415 return fold_builtin_strcspn (loc, arg0, arg1);
9417 case BUILT_IN_STRPBRK:
9418 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9420 case BUILT_IN_EXPECT:
9421 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9423 case BUILT_IN_ISGREATER:
9424 return fold_builtin_unordered_cmp (loc, fndecl,
9425 arg0, arg1, UNLE_EXPR, LE_EXPR);
9426 case BUILT_IN_ISGREATEREQUAL:
9427 return fold_builtin_unordered_cmp (loc, fndecl,
9428 arg0, arg1, UNLT_EXPR, LT_EXPR);
9429 case BUILT_IN_ISLESS:
9430 return fold_builtin_unordered_cmp (loc, fndecl,
9431 arg0, arg1, UNGE_EXPR, GE_EXPR);
9432 case BUILT_IN_ISLESSEQUAL:
9433 return fold_builtin_unordered_cmp (loc, fndecl,
9434 arg0, arg1, UNGT_EXPR, GT_EXPR);
9435 case BUILT_IN_ISLESSGREATER:
9436 return fold_builtin_unordered_cmp (loc, fndecl,
9437 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9438 case BUILT_IN_ISUNORDERED:
9439 return fold_builtin_unordered_cmp (loc, fndecl,
9440 arg0, arg1, UNORDERED_EXPR,
9441 NOP_EXPR);
9443 /* We do the folding for va_start in the expander. */
9444 case BUILT_IN_VA_START:
9445 break;
9447 case BUILT_IN_OBJECT_SIZE:
9448 return fold_builtin_object_size (arg0, arg1);
9450 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9451 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9453 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9454 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9456 default:
9457 break;
9459 return NULL_TREE;
9462 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9463 and ARG2.
9464 This function returns NULL_TREE if no simplification was possible. */
9466 static tree
9467 fold_builtin_3 (location_t loc, tree fndecl,
9468 tree arg0, tree arg1, tree arg2)
9470 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9471 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9473 if (TREE_CODE (arg0) == ERROR_MARK
9474 || TREE_CODE (arg1) == ERROR_MARK
9475 || TREE_CODE (arg2) == ERROR_MARK)
9476 return NULL_TREE;
9478 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9479 arg0, arg1, arg2))
9480 return ret;
9482 switch (fcode)
9485 CASE_FLT_FN (BUILT_IN_SINCOS):
9486 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9488 CASE_FLT_FN (BUILT_IN_REMQUO):
9489 if (validate_arg (arg0, REAL_TYPE)
9490 && validate_arg (arg1, REAL_TYPE)
9491 && validate_arg (arg2, POINTER_TYPE))
9492 return do_mpfr_remquo (arg0, arg1, arg2);
9493 break;
9495 case BUILT_IN_MEMCMP:
9496 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9498 case BUILT_IN_EXPECT:
9499 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9501 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9502 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9504 case BUILT_IN_ADD_OVERFLOW:
9505 case BUILT_IN_SUB_OVERFLOW:
9506 case BUILT_IN_MUL_OVERFLOW:
9507 case BUILT_IN_ADD_OVERFLOW_P:
9508 case BUILT_IN_SUB_OVERFLOW_P:
9509 case BUILT_IN_MUL_OVERFLOW_P:
9510 case BUILT_IN_SADD_OVERFLOW:
9511 case BUILT_IN_SADDL_OVERFLOW:
9512 case BUILT_IN_SADDLL_OVERFLOW:
9513 case BUILT_IN_SSUB_OVERFLOW:
9514 case BUILT_IN_SSUBL_OVERFLOW:
9515 case BUILT_IN_SSUBLL_OVERFLOW:
9516 case BUILT_IN_SMUL_OVERFLOW:
9517 case BUILT_IN_SMULL_OVERFLOW:
9518 case BUILT_IN_SMULLL_OVERFLOW:
9519 case BUILT_IN_UADD_OVERFLOW:
9520 case BUILT_IN_UADDL_OVERFLOW:
9521 case BUILT_IN_UADDLL_OVERFLOW:
9522 case BUILT_IN_USUB_OVERFLOW:
9523 case BUILT_IN_USUBL_OVERFLOW:
9524 case BUILT_IN_USUBLL_OVERFLOW:
9525 case BUILT_IN_UMUL_OVERFLOW:
9526 case BUILT_IN_UMULL_OVERFLOW:
9527 case BUILT_IN_UMULLL_OVERFLOW:
9528 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9530 default:
9531 break;
9533 return NULL_TREE;
9536 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9537 arguments. IGNORE is true if the result of the
9538 function call is ignored. This function returns NULL_TREE if no
9539 simplification was possible. */
9541 tree
9542 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9544 tree ret = NULL_TREE;
9546 switch (nargs)
9548 case 0:
9549 ret = fold_builtin_0 (loc, fndecl);
9550 break;
9551 case 1:
9552 ret = fold_builtin_1 (loc, fndecl, args[0]);
9553 break;
9554 case 2:
9555 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9556 break;
9557 case 3:
9558 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9559 break;
9560 default:
9561 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9562 break;
9564 if (ret)
9566 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9567 SET_EXPR_LOCATION (ret, loc);
9568 return ret;
9570 return NULL_TREE;
9573 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9574 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9575 of arguments in ARGS to be omitted. OLDNARGS is the number of
9576 elements in ARGS. */
9578 static tree
9579 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9580 int skip, tree fndecl, int n, va_list newargs)
9582 int nargs = oldnargs - skip + n;
9583 tree *buffer;
9585 if (n > 0)
9587 int i, j;
9589 buffer = XALLOCAVEC (tree, nargs);
9590 for (i = 0; i < n; i++)
9591 buffer[i] = va_arg (newargs, tree);
9592 for (j = skip; j < oldnargs; j++, i++)
9593 buffer[i] = args[j];
9595 else
9596 buffer = args + skip;
9598 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9601 /* Return true if FNDECL shouldn't be folded right now.
9602 If a built-in function has a wrapper declared inline with the
9603 always_inline attribute, defer folding it until after always_inline
9604 functions have been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
9605 might not be performed. */
9607 bool
9608 avoid_folding_inline_builtin (tree fndecl)
9610 return (DECL_DECLARED_INLINE_P (fndecl)
9611 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9612 && cfun
9613 && !cfun->always_inline_functions_inlined
9614 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9617 /* A wrapper function for builtin folding that prevents warnings for
9618 "statement without effect" and the like, caused by removing the
9619 call node earlier than the warning is generated. */
9621 tree
9622 fold_call_expr (location_t loc, tree exp, bool ignore)
9624 tree ret = NULL_TREE;
9625 tree fndecl = get_callee_fndecl (exp);
9626 if (fndecl && fndecl_built_in_p (fndecl)
9627 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9628 yet. Defer folding until we see all the arguments
9629 (after inlining). */
9630 && !CALL_EXPR_VA_ARG_PACK (exp))
9632 int nargs = call_expr_nargs (exp);
9634 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9635 instead last argument is __builtin_va_arg_pack (). Defer folding
9636 even in that case, until arguments are finalized. */
9637 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9639 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9640 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9641 return NULL_TREE;
9644 if (avoid_folding_inline_builtin (fndecl))
9645 return NULL_TREE;
9647 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9648 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9649 CALL_EXPR_ARGP (exp), ignore);
9650 else
9652 tree *args = CALL_EXPR_ARGP (exp);
9653 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9654 if (ret)
9655 return ret;
9658 return NULL_TREE;
9661 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9662 N arguments are passed in the array ARGARRAY. Return a folded
9663 expression or NULL_TREE if no simplification was possible. */
9665 tree
9666 fold_builtin_call_array (location_t loc, tree,
9667 tree fn,
9668 int n,
9669 tree *argarray)
9671 if (TREE_CODE (fn) != ADDR_EXPR)
9672 return NULL_TREE;
9674 tree fndecl = TREE_OPERAND (fn, 0);
9675 if (TREE_CODE (fndecl) == FUNCTION_DECL
9676 && fndecl_built_in_p (fndecl))
9678 /* If last argument is __builtin_va_arg_pack (), arguments to this
9679 function are not finalized yet. Defer folding until they are. */
9680 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9682 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9683 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9684 return NULL_TREE;
9686 if (avoid_folding_inline_builtin (fndecl))
9687 return NULL_TREE;
9688 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9689 return targetm.fold_builtin (fndecl, n, argarray, false);
9690 else
9691 return fold_builtin_n (loc, fndecl, argarray, n, false);
9694 return NULL_TREE;
9697 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9698 along with N new arguments specified as the "..." parameters. SKIP
9699 is the number of arguments in EXP to be omitted. This function is used
9700 to do varargs-to-varargs transformations. */
9702 static tree
9703 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9705 va_list ap;
9706 tree t;
9708 va_start (ap, n);
9709 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9710 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9711 va_end (ap);
9713 return t;
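/* Typical use of rewrite_call_expr (an illustrative sketch;
   strcpy_decl stands for a decl obtained elsewhere): drop all three
   arguments of a __strcpy_chk call (SKIP == 3) and supply the two
   that plain strcpy needs:

     rewrite_call_expr (loc, exp, 3, strcpy_decl, 2,
			CALL_EXPR_ARG (exp, 0), CALL_EXPR_ARG (exp, 1));

   The N new arguments come first, then the tail of EXP's arguments
   from index SKIP onward.  */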
9716 /* Validate a single argument ARG against a tree code CODE representing
9717 a type. Return true when argument is valid. */
9719 static bool
9720 validate_arg (const_tree arg, enum tree_code code)
9722 if (!arg)
9723 return false;
9724 else if (code == POINTER_TYPE)
9725 return POINTER_TYPE_P (TREE_TYPE (arg));
9726 else if (code == INTEGER_TYPE)
9727 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9728 return code == TREE_CODE (TREE_TYPE (arg));
9731 /* This function validates the types of a function call argument list
9732 against a specified list of tree_codes. If the last specifier is a 0,
9733 that represents an ellipsis; otherwise the last specifier must be a
9734 VOID_TYPE.
9736 This is the GIMPLE version of validate_arglist. Eventually we want to
9737 completely convert builtins.c to work from GIMPLEs and the tree based
9738 validate_arglist will then be removed. */
9740 bool
9741 validate_gimple_arglist (const gcall *call, ...)
9743 enum tree_code code;
9744 bool res = false;
9745 va_list ap;
9746 const_tree arg;
9747 size_t i;
9749 va_start (ap, call);
9750 i = 0;
9752 do
9753 {
9754 code = (enum tree_code) va_arg (ap, int);
9755 switch (code)
9757 case 0:
9758 /* This signifies an ellipsis; any further arguments are all OK. */
9759 res = true;
9760 goto end;
9761 case VOID_TYPE:
9762 /* This signifies an endlink, if no arguments remain, return
9763 true, otherwise return false. */
9764 res = (i == gimple_call_num_args (call));
9765 goto end;
9766 default:
9767 /* If no parameters remain or the parameter's code does not
9768 match the specified code, return false. Otherwise continue
9769 checking any remaining arguments. */
9770 arg = gimple_call_arg (call, i++);
9771 if (!validate_arg (arg, code))
9772 goto end;
9773 break;
9776 while (1);
9778 /* We need gotos here since we can only have one VA_CLOSE in a
9779 function. */
9780 end: ;
9781 va_end (ap);
9783 return res;
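/* Typical use of validate_gimple_arglist (illustrative; STMT stands
   for some gcall): check that a call takes (pointer, pointer,
   integer) exactly:

     validate_gimple_arglist (stmt, POINTER_TYPE, POINTER_TYPE,
			      INTEGER_TYPE, VOID_TYPE);

   A trailing 0 instead of VOID_TYPE would accept any further
   arguments.  */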
9786 /* Default target-specific builtin expander that does nothing. */
9788 rtx
9789 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9790 rtx target ATTRIBUTE_UNUSED,
9791 rtx subtarget ATTRIBUTE_UNUSED,
9792 machine_mode mode ATTRIBUTE_UNUSED,
9793 int ignore ATTRIBUTE_UNUSED)
9795 return NULL_RTX;
9798 /* Returns true if EXP represents data that would potentially reside
9799 in a readonly section. */
9801 bool
9802 readonly_data_expr (tree exp)
9804 STRIP_NOPS (exp);
9806 if (TREE_CODE (exp) != ADDR_EXPR)
9807 return false;
9809 exp = get_base_address (TREE_OPERAND (exp, 0));
9810 if (!exp)
9811 return false;
9813 /* Make sure we call decl_readonly_section only for trees it
9814 can handle (since it returns true for everything it doesn't
9815 understand). */
9816 if (TREE_CODE (exp) == STRING_CST
9817 || TREE_CODE (exp) == CONSTRUCTOR
9818 || (VAR_P (exp) && TREE_STATIC (exp)))
9819 return decl_readonly_section (exp, 0);
9820 else
9821 return false;
9824 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9825 to the call, and TYPE is its return type.
9827 Return NULL_TREE if no simplification was possible, otherwise return the
9828 simplified form of the call as a tree.
9830 The simplified form may be a constant or other expression which
9831 computes the same value, but in a more efficient manner (including
9832 calls to other builtin functions).
9834 The call may contain arguments which need to be evaluated, but
9835 which are not useful to determine the result of the call. In
9836 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9837 COMPOUND_EXPR will be an argument which must be evaluated.
9838 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9839 COMPOUND_EXPR in the chain will contain the tree for the simplified
9840 form of the builtin function call. */
9842 static tree
9843 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9845 if (!validate_arg (s1, POINTER_TYPE)
9846 || !validate_arg (s2, POINTER_TYPE))
9847 return NULL_TREE;
9848 else
9850 tree fn;
9851 const char *p1, *p2;
9853 p2 = c_getstr (s2);
9854 if (p2 == NULL)
9855 return NULL_TREE;
9857 p1 = c_getstr (s1);
9858 if (p1 != NULL)
9860 const char *r = strpbrk (p1, p2);
9861 tree tem;
9863 if (r == NULL)
9864 return build_int_cst (TREE_TYPE (s1), 0);
9866 /* Return an offset into the constant string argument. */
9867 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9868 return fold_convert_loc (loc, type, tem);
9871 if (p2[0] == '\0')
9872 /* strpbrk(x, "") == NULL.
9873 Evaluate and ignore s1 in case it had side-effects. */
9874 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9876 if (p2[1] != '\0')
9877 return NULL_TREE; /* Really call strpbrk. */
9879 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9880 if (!fn)
9881 return NULL_TREE;
9883 /* New argument list transforming strpbrk(s1, s2) to
9884 strchr(s1, s2[0]). */
9885 return build_call_expr_loc (loc, fn, 2, s1,
9886 build_int_cst (integer_type_node, p2[0]));
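/* User-visible effects of the strpbrk fold above (illustrative):

     strpbrk (s, "")     -> NULL, with s still evaluated
     strpbrk ("ab", "b") -> a constant offset into "ab"
     strpbrk (s, "c")    -> strchr (s, 'c')

   The single-character case generates a call like this hypothetical
   demo_strpbrk_fold.  */
#include <string.h>

static char *
demo_strpbrk_fold (char *s)
{
  return strchr (s, 'c');
}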
9890 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9891 to the call.
9893 Return NULL_TREE if no simplification was possible, otherwise return the
9894 simplified form of the call as a tree.
9896 The simplified form may be a constant or other expression which
9897 computes the same value, but in a more efficient manner (including
9898 calls to other builtin functions).
9900 The call may contain arguments which need to be evaluated, but
9901 which are not useful to determine the result of the call. In
9902 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9903 COMPOUND_EXPR will be an argument which must be evaluated.
9904 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9905 COMPOUND_EXPR in the chain will contain the tree for the simplified
9906 form of the builtin function call. */
9908 static tree
9909 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9911 if (!validate_arg (s1, POINTER_TYPE)
9912 || !validate_arg (s2, POINTER_TYPE))
9913 return NULL_TREE;
9914 else
9916 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9918 /* If either argument is "", return NULL_TREE. */
9919 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9920 /* Evaluate and ignore both arguments in case either one has
9921 side-effects. */
9922 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9923 s1, s2);
9924 return NULL_TREE;
9928 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9929 to the call.
9931 Return NULL_TREE if no simplification was possible, otherwise return the
9932 simplified form of the call as a tree.
9934 The simplified form may be a constant or other expression which
9935 computes the same value, but in a more efficient manner (including
9936 calls to other builtin functions).
9938 The call may contain arguments which need to be evaluated, but
9939 which are not useful to determine the result of the call. In
9940 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9941 COMPOUND_EXPR will be an argument which must be evaluated.
9942 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9943 COMPOUND_EXPR in the chain will contain the tree for the simplified
9944 form of the builtin function call. */
9946 static tree
9947 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9949 if (!validate_arg (s1, POINTER_TYPE)
9950 || !validate_arg (s2, POINTER_TYPE))
9951 return NULL_TREE;
9952 else
9954 /* If the first argument is "", return NULL_TREE. */
9955 const char *p1 = c_getstr (s1);
9956 if (p1 && *p1 == '\0')
9958 /* Evaluate and ignore argument s2 in case it has
9959 side-effects. */
9960 return omit_one_operand_loc (loc, size_type_node,
9961 size_zero_node, s2);
9964 /* If the second argument is "", return __builtin_strlen(s1). */
9965 const char *p2 = c_getstr (s2);
9966 if (p2 && *p2 == '\0')
9968 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9970 /* If the replacement _DECL isn't initialized, don't do the
9971 transformation. */
9972 if (!fn)
9973 return NULL_TREE;
9975 return build_call_expr_loc (loc, fn, 1, s1);
9977 return NULL_TREE;
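/* Illustrative, not part of this file: the two strcspn folds above
   (demo_strcspn_empty_reject is hypothetical).  */
#include <string.h>

static size_t
demo_strcspn_empty_reject (const char *s)
{
  /* strcspn (s, "") -> strlen (s); strcspn ("", s) folds to 0 with
     s still evaluated.  */
  return strlen (s);
}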
9981 /* Fold the next_arg or va_start call EXP. Returns true if an error
9982 was produced, false otherwise. This is done so that we don't output
9983 the error or warning two or three times. */
9985 bool
9986 fold_builtin_next_arg (tree exp, bool va_start_p)
9988 tree fntype = TREE_TYPE (current_function_decl);
9989 int nargs = call_expr_nargs (exp);
9990 tree arg;
9991 /* There is a good chance the current input_location points inside the
9992 definition of the va_start macro (perhaps on the token for the
9993 builtin) in a system header, so warnings will not be emitted.
9994 Use the location in real source code. */
9995 source_location current_location =
9996 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9997 NULL);
9999 if (!stdarg_p (fntype))
10001 error ("%<va_start%> used in function with fixed args");
10002 return true;
10005 if (va_start_p)
10007 if (va_start_p && (nargs != 2))
10009 error ("wrong number of arguments to function %<va_start%>");
10010 return true;
10012 arg = CALL_EXPR_ARG (exp, 1);
10014 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10015 when we checked the arguments and if needed issued a warning. */
10016 else
10018 if (nargs == 0)
10020 /* Evidently an out of date version of <stdarg.h>; can't validate
10021 va_start's second argument, but can still work as intended. */
10022 warning_at (current_location,
10023 OPT_Wvarargs,
10024 "%<__builtin_next_arg%> called without an argument");
10025 return true;
10027 else if (nargs > 1)
10029 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10030 return true;
10032 arg = CALL_EXPR_ARG (exp, 0);
10035 if (TREE_CODE (arg) == SSA_NAME)
10036 arg = SSA_NAME_VAR (arg);
10038 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10039 or __builtin_next_arg (0) the first time we see it, after checking
10040 the arguments and if needed issuing a warning. */
10041 if (!integer_zerop (arg))
10043 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10045 /* Strip off all nops for the sake of the comparison. This
10046 is not quite the same as STRIP_NOPS. It does more.
10047 We must also strip off INDIRECT_EXPR for C++ reference
10048 parameters. */
10049 while (CONVERT_EXPR_P (arg)
10050 || TREE_CODE (arg) == INDIRECT_REF)
10051 arg = TREE_OPERAND (arg, 0);
10052 if (arg != last_parm)
10054 /* FIXME: Sometimes with the tree optimizers we can end up with
10055 something other than the last argument even though the user
10056 used the last argument. We just warn and set the arg to be
10057 the last argument so that we will get wrong-code because of
10058 it. */
10059 warning_at (current_location,
10060 OPT_Wvarargs,
10061 "second parameter of %<va_start%> not last named argument");
10064 /* Undefined by C99 7.15.1.4p4 (va_start):
10065 "If the parameter parmN is declared with the register storage
10066 class, with a function or array type, or with a type that is
10067 not compatible with the type that results after application of
10068 the default argument promotions, the behavior is undefined." */
10070 else if (DECL_REGISTER (arg))
10072 warning_at (current_location,
10073 OPT_Wvarargs,
10074 "undefined behavior when second parameter of "
10075 "%<va_start%> is declared with %<register%> storage");
10078 /* We want to verify the second parameter just once before the tree
10079 optimizers are run and then avoid keeping it in the tree,
10080 as otherwise we could warn even for correct code like:
10081 void foo (int i, ...)
10082 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10083 if (va_start_p)
10084 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10085 else
10086 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10088 return false;
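/* The verification above accepts the classic pattern (illustrative;
   demo_sum is hypothetical):  */
#include <stdarg.h>

static int
demo_sum (int count, ...)
{
  va_list ap;
  int i, sum = 0;

  va_start (ap, count);		/* second argument: the last named parm */
  for (i = 0; i < count; i++)
    sum += va_arg (ap, int);
  va_end (ap);
  return sum;
}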
10092 /* Expand a call EXP to __builtin_object_size. */
10094 static rtx
10095 expand_builtin_object_size (tree exp)
10097 tree ost;
10098 int object_size_type;
10099 tree fndecl = get_callee_fndecl (exp);
10101 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10103 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10104 exp, fndecl);
10105 expand_builtin_trap ();
10106 return const0_rtx;
10109 ost = CALL_EXPR_ARG (exp, 1);
10110 STRIP_NOPS (ost);
10112 if (TREE_CODE (ost) != INTEGER_CST
10113 || tree_int_cst_sgn (ost) < 0
10114 || compare_tree_int (ost, 3) > 0)
10116 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10117 exp, fndecl);
10118 expand_builtin_trap ();
10119 return const0_rtx;
10122 object_size_type = tree_to_shwi (ost);
10124 return object_size_type < 2 ? constm1_rtx : const0_rtx;
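/* Illustrative, not part of this file: the defaults used above when
   the size is still unknown at expansion time (demo_object_size is
   hypothetical).  */
static void
demo_object_size (char *p)
{
  /* Types 0 and 1 ask for the maximum remaining size, so "unknown"
     is (size_t) -1; types 2 and 3 ask for the minimum, so 0.  */
  __SIZE_TYPE__ max_est = __builtin_object_size (p, 0);	/* -1 if unknown */
  __SIZE_TYPE__ min_est = __builtin_object_size (p, 2);	/* 0 if unknown */
  (void) max_est; (void) min_est;
}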
10127 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10128 FCODE is the BUILT_IN_* to use.
10129 Return NULL_RTX if we failed; the caller should emit a normal call,
10130 otherwise try to get the result in TARGET, if convenient (and in
10131 mode MODE if that's convenient). */
10133 static rtx
10134 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10135 enum built_in_function fcode)
10137 if (!validate_arglist (exp,
10138 POINTER_TYPE,
10139 fcode == BUILT_IN_MEMSET_CHK
10140 ? INTEGER_TYPE : POINTER_TYPE,
10141 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10142 return NULL_RTX;
10144 tree dest = CALL_EXPR_ARG (exp, 0);
10145 tree src = CALL_EXPR_ARG (exp, 1);
10146 tree len = CALL_EXPR_ARG (exp, 2);
10147 tree size = CALL_EXPR_ARG (exp, 3);
10149 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10150 /*str=*/NULL_TREE, size);
10152 if (!tree_fits_uhwi_p (size))
10153 return NULL_RTX;
10155 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10157 /* Avoid transforming the checking call to an ordinary one when
10158 an overflow has been detected or when the call couldn't be
10159 validated because the size is not constant. */
10160 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10161 return NULL_RTX;
10163 tree fn = NULL_TREE;
10164 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10165 mem{cpy,pcpy,move,set} is available. */
10166 switch (fcode)
10168 case BUILT_IN_MEMCPY_CHK:
10169 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10170 break;
10171 case BUILT_IN_MEMPCPY_CHK:
10172 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10173 break;
10174 case BUILT_IN_MEMMOVE_CHK:
10175 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10176 break;
10177 case BUILT_IN_MEMSET_CHK:
10178 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10179 break;
10180 default:
10181 break;
10184 if (! fn)
10185 return NULL_RTX;
10187 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10188 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10189 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10190 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10192 else if (fcode == BUILT_IN_MEMSET_CHK)
10193 return NULL_RTX;
10194 else
10196 unsigned int dest_align = get_pointer_alignment (dest);
10198 /* If DEST is not a pointer type, call the normal function. */
10199 if (dest_align == 0)
10200 return NULL_RTX;
10202 /* If SRC and DEST are the same (and not volatile), do nothing. */
10203 if (operand_equal_p (src, dest, 0))
10205 tree expr;
10207 if (fcode != BUILT_IN_MEMPCPY_CHK)
10209 /* Evaluate and ignore LEN in case it has side-effects. */
10210 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10211 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10214 expr = fold_build_pointer_plus (dest, len);
10215 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10218 /* __memmove_chk special case. */
10219 if (fcode == BUILT_IN_MEMMOVE_CHK)
10221 unsigned int src_align = get_pointer_alignment (src);
10223 if (src_align == 0)
10224 return NULL_RTX;
10226 /* If src is categorized for a readonly section we can use
10227 normal __memcpy_chk. */
10228 if (readonly_data_expr (src))
10230 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10231 if (!fn)
10232 return NULL_RTX;
10233 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10234 dest, src, len, size);
10235 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10236 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10237 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10240 return NULL_RTX;
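/* Illustrative, not part of this file: the _FORTIFY_SOURCE pattern
   this expansion serves.  A checking call such as

     __builtin___memcpy_chk (d, s, n, __builtin_object_size (d, 0))

   is lowered to plain memcpy when n is a known constant that fits in
   the object size; otherwise the library's checking entry point is
   left in place to fail at run time.  */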
10244 /* Emit warning if a buffer overflow is detected at compile time. */
10246 static void
10247 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10249 /* The source string. */
10250 tree srcstr = NULL_TREE;
10251 /* The size of the destination object. */
10252 tree objsize = NULL_TREE;
10253 /* The string that is being concatenated with (as in __strcat_chk)
10254 or null if it isn't. */
10255 tree catstr = NULL_TREE;
10256 /* The maximum length of the source sequence in a bounded operation
10257 (such as __strncat_chk) or null if the operation isn't bounded
10258 (such as __strcat_chk). */
10259 tree maxread = NULL_TREE;
10260 /* The exact size of the access (such as in __strncpy_chk). */
10261 tree size = NULL_TREE;
10263 switch (fcode)
10265 case BUILT_IN_STRCPY_CHK:
10266 case BUILT_IN_STPCPY_CHK:
10267 srcstr = CALL_EXPR_ARG (exp, 1);
10268 objsize = CALL_EXPR_ARG (exp, 2);
10269 break;
10271 case BUILT_IN_STRCAT_CHK:
10272 /* For __strcat_chk the warning will be emitted only if overflowing
10273 by at least strlen (dest) + 1 bytes. */
10274 catstr = CALL_EXPR_ARG (exp, 0);
10275 srcstr = CALL_EXPR_ARG (exp, 1);
10276 objsize = CALL_EXPR_ARG (exp, 2);
10277 break;
10279 case BUILT_IN_STRNCAT_CHK:
10280 catstr = CALL_EXPR_ARG (exp, 0);
10281 srcstr = CALL_EXPR_ARG (exp, 1);
10282 maxread = CALL_EXPR_ARG (exp, 2);
10283 objsize = CALL_EXPR_ARG (exp, 3);
10284 break;
10286 case BUILT_IN_STRNCPY_CHK:
10287 case BUILT_IN_STPNCPY_CHK:
10288 srcstr = CALL_EXPR_ARG (exp, 1);
10289 size = CALL_EXPR_ARG (exp, 2);
10290 objsize = CALL_EXPR_ARG (exp, 3);
10291 break;
10293 case BUILT_IN_SNPRINTF_CHK:
10294 case BUILT_IN_VSNPRINTF_CHK:
10295 maxread = CALL_EXPR_ARG (exp, 1);
10296 objsize = CALL_EXPR_ARG (exp, 3);
10297 break;
10298 default:
10299 gcc_unreachable ();
10302 if (catstr && maxread)
10304 /* Check __strncat_chk. There is no way to determine the length
10305 of the string to which the source string is being appended so
10306 just warn when the length of the source string is not known. */
10307 check_strncat_sizes (exp, objsize);
10308 return;
10311 /* The destination argument is the first one for all built-ins above. */
10312 tree dst = CALL_EXPR_ARG (exp, 0);
10314 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10317 /* Emit warning if a buffer overflow is detected at compile time
10318 in __sprintf_chk/__vsprintf_chk calls. */
10320 static void
10321 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10323 tree size, len, fmt;
10324 const char *fmt_str;
10325 int nargs = call_expr_nargs (exp);
10327 /* Verify the required arguments in the original call. */
10329 if (nargs < 4)
10330 return;
10331 size = CALL_EXPR_ARG (exp, 2);
10332 fmt = CALL_EXPR_ARG (exp, 3);
10334 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10335 return;
10337 /* Check whether the format is a literal string constant. */
10338 fmt_str = c_getstr (fmt);
10339 if (fmt_str == NULL)
10340 return;
10342 if (!init_target_chars ())
10343 return;
10345 /* If the format doesn't contain % args or %%, we know its size. */
10346 if (strchr (fmt_str, target_percent) == 0)
10347 len = build_int_cstu (size_type_node, strlen (fmt_str));
10348 /* If the format is "%s" and first ... argument is a string literal,
10349 we know it too. */
10350 else if (fcode == BUILT_IN_SPRINTF_CHK
10351 && strcmp (fmt_str, target_percent_s) == 0)
10353 tree arg;
10355 if (nargs < 5)
10356 return;
10357 arg = CALL_EXPR_ARG (exp, 4);
10358 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10359 return;
10361 len = c_strlen (arg, 1);
10362 if (!len || ! tree_fits_uhwi_p (len))
10363 return;
10365 else
10366 return;
10368 /* Add one for the terminating nul. */
10369 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10371 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10372 /*maxread=*/NULL_TREE, len, size);
10375 /* Emit warning if a free is called with address of a variable. */
10377 static void
10378 maybe_emit_free_warning (tree exp)
10380 tree arg = CALL_EXPR_ARG (exp, 0);
10382 STRIP_NOPS (arg);
10383 if (TREE_CODE (arg) != ADDR_EXPR)
10384 return;
10386 arg = get_base_address (TREE_OPERAND (arg, 0));
10387 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10388 return;
10390 if (SSA_VAR_P (arg))
10391 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10392 "%Kattempt to free a non-heap object %qD", exp, arg);
10393 else
10394 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10395 "%Kattempt to free a non-heap object", exp);
10398 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10399 if possible. */
10401 static tree
10402 fold_builtin_object_size (tree ptr, tree ost)
10404 unsigned HOST_WIDE_INT bytes;
10405 int object_size_type;
10407 if (!validate_arg (ptr, POINTER_TYPE)
10408 || !validate_arg (ost, INTEGER_TYPE))
10409 return NULL_TREE;
10411 STRIP_NOPS (ost);
10413 if (TREE_CODE (ost) != INTEGER_CST
10414 || tree_int_cst_sgn (ost) < 0
10415 || compare_tree_int (ost, 3) > 0)
10416 return NULL_TREE;
10418 object_size_type = tree_to_shwi (ost);
10420 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10421 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10422 and (size_t) 0 for types 2 and 3. */
10423 if (TREE_SIDE_EFFECTS (ptr))
10424 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10426 if (TREE_CODE (ptr) == ADDR_EXPR)
10428 compute_builtin_object_size (ptr, object_size_type, &bytes);
10429 if (wi::fits_to_tree_p (bytes, size_type_node))
10430 return build_int_cstu (size_type_node, bytes);
10432 else if (TREE_CODE (ptr) == SSA_NAME)
10434 /* If object size is not known yet, delay folding until
10435 later. Maybe subsequent passes will help determining
10436 it. */
10437 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10438 && wi::fits_to_tree_p (bytes, size_type_node))
10439 return build_int_cstu (size_type_node, bytes);
10442 return NULL_TREE;
10445 /* Builtins with folding operations that operate on "..." arguments
10446 need special handling; we need to store the arguments in a convenient
10447 data structure before attempting any folding. Fortunately there are
10448 only a few builtins that fall into this category. FNDECL is the
10449 function, EXP is the CALL_EXPR for the call. */
10451 static tree
10452 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10454 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10455 tree ret = NULL_TREE;
10457 switch (fcode)
10459 case BUILT_IN_FPCLASSIFY:
10460 ret = fold_builtin_fpclassify (loc, args, nargs);
10461 break;
10463 default:
10464 break;
10466 if (ret)
10468 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10469 SET_EXPR_LOCATION (ret, loc);
10470 TREE_NO_WARNING (ret) = 1;
10471 return ret;
10473 return NULL_TREE;
10476 /* Initialize format string characters in the target charset. */
10478 bool
10479 init_target_chars (void)
10481 static bool init;
10482 if (!init)
10484 target_newline = lang_hooks.to_target_charset ('\n');
10485 target_percent = lang_hooks.to_target_charset ('%');
10486 target_c = lang_hooks.to_target_charset ('c');
10487 target_s = lang_hooks.to_target_charset ('s');
10488 if (target_newline == 0 || target_percent == 0 || target_c == 0
10489 || target_s == 0)
10490 return false;
10492 target_percent_c[0] = target_percent;
10493 target_percent_c[1] = target_c;
10494 target_percent_c[2] = '\0';
10496 target_percent_s[0] = target_percent;
10497 target_percent_s[1] = target_s;
10498 target_percent_s[2] = '\0';
10500 target_percent_s_newline[0] = target_percent;
10501 target_percent_s_newline[1] = target_s;
10502 target_percent_s_newline[2] = target_newline;
10503 target_percent_s_newline[3] = '\0';
10505 init = true;
10507 return true;
10510 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10511 and no overflow/underflow occurred. INEXACT is true if M was not
10512 exactly calculated. TYPE is the tree type for the result. This
10513 function assumes that you cleared the MPFR flags and then
10514 calculated M to see if anything subsequently set a flag prior to
10515 entering this function. Return NULL_TREE if any checks fail. */
10517 static tree
10518 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10520 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10521 overflow/underflow occurred. If -frounding-math, proceed iff the
10522 result of calling FUNC was exact. */
10523 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10524 && (!flag_rounding_math || !inexact))
10526 REAL_VALUE_TYPE rr;
10528 real_from_mpfr (&rr, m, type, GMP_RNDN);
10529 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10530 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10531 but the mpfr_t is not, then we underflowed in the
10532 conversion. */
10533 if (real_isfinite (&rr)
10534 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10536 REAL_VALUE_TYPE rmode;
10538 real_convert (&rmode, TYPE_MODE (type), &rr);
10539 /* Proceed iff the specified mode can hold the value. */
10540 if (real_identical (&rmode, &rr))
10541 return build_real (type, rmode);
10544 return NULL_TREE;
10547 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10548 number and no overflow/underflow occurred. INEXACT is true if M
10549 was not exactly calculated. TYPE is the tree type for the result.
10550 This function assumes that you cleared the MPFR flags and then
10551 calculated M to see if anything subsequently set a flag prior to
10552 entering this function. Return NULL_TREE if any checks fail, if
10553 FORCE_CONVERT is true, then bypass the checks. */
10555 static tree
10556 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10558 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10559 overflow/underflow occurred. If -frounding-math, proceed iff the
10560 result of calling FUNC was exact. */
10561 if (force_convert
10562 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10563 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10564 && (!flag_rounding_math || !inexact)))
10566 REAL_VALUE_TYPE re, im;
10568 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10569 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10570 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10571 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10572 but the mpfr_t is not, then we underflowed in the
10573 conversion. */
10574 if (force_convert
10575 || (real_isfinite (&re) && real_isfinite (&im)
10576 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10577 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10579 REAL_VALUE_TYPE re_mode, im_mode;
10581 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10582 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10583 /* Proceed iff the specified mode can hold the value. */
10584 if (force_convert
10585 || (real_identical (&re_mode, &re)
10586 && real_identical (&im_mode, &im)))
10587 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10588 build_real (TREE_TYPE (type), im_mode));
10591 return NULL_TREE;
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
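
/* For reference, the host-side semantics being folded (illustrative demo,
   not part of this file; assumes a C99 libm): remquo returns the remainder
   and stores at least three low bits, plus the sign, of the rounded
   quotient; hence the INT_TYPE_SIZE truncation above.  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  int quo;
  double rem = remquo (7.0, 2.0, &quo);
  /* The round-to-nearest quotient of 7/2 is 4, so rem is -1.  */
  printf ("rem=%g quo=%d\n", rem, quo);
  return 0;
}
#endif
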
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
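
/* The user-level contract being folded (illustrative demo, not part of
   this file): lgamma_r computes log|gamma(x)| and stores the sign of
   gamma(x) through the pointer.  lgamma_r is a common POSIX extension
   rather than ISO C, hence the explicit declaration.  */
#if 0
#include <math.h>
#include <stdio.h>

extern double lgamma_r (double, int *);

int
main (void)
{
  int sg;
  double lg = lgamma_r (-2.5, &sg);
  /* gamma(-2.5) is negative, so sg is -1.  */
  printf ("lg=%g sign=%d\n", lg, sg);
  return 0;
}
#endif
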
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
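
/* FUNC above matches the signature of MPC's binary entry points such as
   mpc_pow.  A standalone sketch of the same call pattern (illustrative
   only, not part of GCC):  */
#if 0
#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  mpc_t z0, z1;
  mpc_init2 (z0, 53);	/* precision of IEEE double */
  mpc_init2 (z1, 53);
  mpc_set_d_d (z0, 0.0, 1.0, MPC_RNDNN);	/* z0 = i */
  mpc_set_d_d (z1, 2.0, 0.0, MPC_RNDNN);	/* z1 = 2 */
  int inexact = mpc_pow (z0, z0, z1, MPC_RNDNN);	/* z0 = i^2 = -1 */
  mpfr_printf ("re=%.3Rf im=%.3Rf inexact=%d\n",
	       mpc_realref (z0), mpc_imagref (z0), inexact);
  mpc_clear (z0);
  mpc_clear (z1);
  return 0;
}
#endif
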
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
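
/* The folding driven by this wrapper is visible at the user level: a
   builtin call with constant arguments can disappear entirely
   (illustrative only, not part of this file).  */
#if 0
/* GCC folds this call to the constant 5; no call to strlen survives
   in the generated code.  */
int
folded_length (void)
{
  return __builtin_strlen ("hello");
}
#endif
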
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
              && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
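
/* A user assembler name typically arrives via the GNU asm-label
   extension; redeclaring a builtin function this way is what routes its
   DECL through here (illustrative, hypothetical symbol name).  */
#if 0
/* Also redirects the libcall GCC emits when it expands __builtin_ffs
   out of line, via the ffs_optab handling above.  */
extern int ffs (int) __asm__ ("__my_ffs");
#endif
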
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}
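
/* Source-level view of some of these (illustrative only, not part of
   this file): each call below folds to a constant or a trivial move.  */
#if 0
int
simple_uses (int x)
{
  if (__builtin_constant_p (x))		/* folds to 0 or 1 */
    return __builtin_expect (x, 1);	/* x itself, plus a branch hint */
  return __builtin_object_size (&x, 0) != (__SIZE_TYPE__) -1;
}
#endif
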
/* Return true if DECL is a builtin that is not expensive, i.e., it is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}
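
/* Source-level view (illustrative only, not part of this file): on most
   targets each of these expands inline to one or a few instructions.  */
#if 0
unsigned
inexpensive_uses (unsigned x)
{
  /* The "| 1" keeps the clz/ctz operands nonzero, since their results
     are undefined for zero.  */
  return __builtin_popcount (x) + __builtin_bswap32 (x)
	 + __builtin_clz (x | 1) + __builtin_ctz (x | 1);
}
#endif
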
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}
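
/* A host-side analogue of the width check above (hypothetical helper,
   not part of GCC): the cast is meaningful only when target chars are as
   wide as host chars; the constant is then simply truncated.  */
#if 0
#include <limits.h>
#include <stdbool.h>

static bool
host_char_cast (unsigned long long value, unsigned target_char_bits, char *p)
{
  if (target_char_bits != (unsigned) CHAR_BIT)
    return false;
  *p = (char) value;
  return true;
}
#endif
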
/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
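
/* The PTRDIFF_MAX cap is user-visible: allocation sizes above it exceed
   the maximum object size and draw -Walloc-size-larger-than= warnings
   (illustrative only, not part of this file).  */
#if 0
#include <stdint.h>
#include <stdlib.h>

/* Warns: argument exceeds maximum object size PTRDIFF_MAX.  */
void *
too_big (void)
{
  return malloc ((size_t) PTRDIFF_MAX + 1);
}
#endif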